instruction (string, 1 unique value) | input (string, 31–235k characters) | output (class label, 2 classes: 0 = False, 1 = True) |
---|---|---|
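Each row below pairs the shared classification prompt with a whitespace-tokenized C/C++ snippet and its binary vulnerability label. As a minimal sketch of how rows in this layout could be consumed programmatically — assuming the data were published as a Hugging Face dataset, with `user/code-vuln-binary` as a purely hypothetical identifier — the following Python snippet iterates a few examples and maps the label index back to its name:

```python
# Minimal sketch, assuming the rows below are hosted as a Hugging Face dataset.
# "user/code-vuln-binary" is a hypothetical placeholder, not the real dataset id.
from datasets import load_dataset

ds = load_dataset("user/code-vuln-binary", split="train")

for row in ds.select(range(3)):
    # "instruction": the single shared prompt string
    # "input": the whitespace-tokenized C/C++ snippet
    # "output": class-label index (0 = False / not vulnerable, 1 = True / vulnerable)
    label_name = ds.features["output"].int2str(row["output"])
    print(row["instruction"])
    print(row["input"][:80], "...")
    print("label:", row["output"], f"({label_name})")
```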
Categorize the following code snippet as vulnerable or not. True or False | Oid getBaseType ( Oid typid ) {
int32 typmod = - 1 ;
return getBaseTypeAndTypmod ( typid , & typmod ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | TEST_F ( SyncBookmarkDataTypeControllerTest , StartFirstRun ) {
CreateBookmarkModel ( LOAD_MODEL ) ;
SetStartExpectations ( ) ;
SetAssociateExpectations ( ) ;
EXPECT_CALL ( * model_associator_ , SyncModelHasUserCreatedNodes ( _ ) ) . WillRepeatedly ( DoAll ( SetArgumentPointee < 0 > ( false ) , Return ( true ) ) ) ;
EXPECT_CALL ( start_callback_ , Run ( DataTypeController : : OK_FIRST_RUN , _ , _ ) ) ;
Start ( ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static xmlLinkPtr xmlListLowerSearch ( xmlListPtr l , void * data ) {
xmlLinkPtr lk ;
if ( l == NULL ) return ( NULL ) ;
for ( lk = l -> sentinel -> next ;
lk != l -> sentinel && l -> linkCompare ( lk -> data , data ) < 0 ;
lk = lk -> next ) ;
return lk ;
} | 1True
|
Categorize the following code snippet as vulnerable or not. True or False | static const char * c_escape_str ( AVBPrint * dst , const char * src , const char sep , void * log_ctx ) {
const char * p ;
for ( p = src ;
* p ;
p ++ ) {
switch ( * p ) {
case '\b' : av_bprintf ( dst , "%s" , "\\b" ) ;
break ;
case '\f' : av_bprintf ( dst , "%s" , "\\f" ) ;
break ;
case '\n' : av_bprintf ( dst , "%s" , "\\n" ) ;
break ;
case '\r' : av_bprintf ( dst , "%s" , "\\r" ) ;
break ;
case '\\' : av_bprintf ( dst , "%s" , "\\\\" ) ;
break ;
default : if ( * p == sep ) av_bprint_chars ( dst , '\\' , 1 ) ;
av_bprint_chars ( dst , * p , 1 ) ;
}
}
return dst -> str ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int64_t le4 ( const unsigned char * p ) {
return ( ( p [ 0 ] << 16 ) + ( ( ( int64_t ) p [ 1 ] ) << 24 ) + ( p [ 2 ] << 0 ) + ( p [ 3 ] << 8 ) ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static bool MakeTempFile ( const base : : FilePath & dir , const base : : FilePath & file_name , base : : FilePath * full_path ) {
* full_path = dir . Append ( file_name ) ;
return base : : WriteFile ( * full_path , "" , 0 ) == 0 ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void interpolate ( float * out , float v1 , float v2 , int size ) {
int i ;
float step = ( v1 - v2 ) / ( size + 1 ) ;
for ( i = 0 ;
i < size ;
i ++ ) {
v2 += step ;
out [ i ] = v2 ;
}
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static char * _catkey ( char * buffer , int32_t set_num , int32_t msg_num ) {
int32_t i = 0 ;
i = T_CString_integerToString ( buffer , set_num , 10 ) ;
buffer [ i ++ ] = SEPARATOR ;
T_CString_integerToString ( buffer + i , msg_num , 10 ) ;
return buffer ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int dissect_h245_IV16 ( tvbuff_t * tvb _U_ , int offset _U_ , asn1_ctx_t * actx _U_ , proto_tree * tree _U_ , int hf_index _U_ ) {
offset = dissect_per_octet_string ( tvb , offset , actx , tree , hf_index , 16 , 16 , FALSE , NULL ) ;
return offset ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void update_offset_hash_table_from_kb ( OffsetHashTable tbl , KBNODE node , off_t off ) {
for ( ;
node ;
node = node -> next ) {
if ( node -> pkt -> pkttype == PKT_PUBLIC_KEY || node -> pkt -> pkttype == PKT_PUBLIC_SUBKEY ) {
u32 aki [ 2 ] ;
keyid_from_pk ( node -> pkt -> pkt . public_key , aki ) ;
update_offset_hash_table ( tbl , aki , off ) ;
}
}
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void _UTF7Reset ( UConverter * cnv , UConverterResetChoice choice ) {
if ( choice <= UCNV_RESET_TO_UNICODE ) {
cnv -> toUnicodeStatus = 0x1000000 ;
cnv -> toULength = 0 ;
}
if ( choice != UCNV_RESET_TO_UNICODE ) {
cnv -> fromUnicodeStatus = ( cnv -> fromUnicodeStatus & 0xf0000000 ) | 0x1000000 ;
}
} | 1True
|
Categorize the following code snippet as vulnerable or not. True or False | static void vdpau_h264_set_rf ( VdpReferenceFrameH264 * rf , Picture * pic , int pic_structure ) {
VdpVideoSurface surface = ff_vdpau_get_surface_id ( pic ) ;
if ( pic_structure == 0 ) pic_structure = pic -> reference ;
rf -> surface = surface ;
rf -> is_long_term = pic -> reference && pic -> long_ref ;
rf -> top_is_reference = ( pic_structure & PICT_TOP_FIELD ) != 0 ;
rf -> bottom_is_reference = ( pic_structure & PICT_BOTTOM_FIELD ) != 0 ;
rf -> field_order_cnt [ 0 ] = h264_foc ( pic -> field_poc [ 0 ] ) ;
rf -> field_order_cnt [ 1 ] = h264_foc ( pic -> field_poc [ 1 ] ) ;
rf -> frame_idx = pic -> long_ref ? pic -> pic_id : pic -> frame_num ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | SPL_METHOD ( DirectoryIterator , key ) {
spl_filesystem_object * intern = ( spl_filesystem_object * ) zend_object_store_get_object ( getThis ( ) TSRMLS_CC ) ;
if ( zend_parse_parameters_none ( ) == FAILURE ) {
return ;
}
if ( intern -> u . dir . dirp ) {
RETURN_LONG ( intern -> u . dir . index ) ;
}
else {
RETURN_FALSE ;
}
} | 1True
|
Categorize the following code snippet as vulnerable or not. True or False | static FILE * checkdupstoredtable ( SplineFont * sf , uint32 tag , int * len , struct alltabs * all , int me ) {
int i ;
struct ttf_table * tab = SFFindTable ( sf , tag ) , * test ;
if ( tab == NULL ) {
* len = 0 ;
return ( NULL ) ;
}
for ( i = 0 ;
i < me ;
++ i ) {
test = SFFindTable ( all [ i ] . sf , tag ) ;
if ( test != NULL && test -> len == tab -> len && memcmp ( test -> data , tab -> data , tab -> len ) == 0 ) {
* len = i ;
return ( ( FILE * ) ( intpt ) - 1 ) ;
}
}
return ( dumpstoredtable ( sf , tag , len ) ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | int compare_key_tok_id ( const void * a1 , const void * a2 ) {
const struct key_tok * p1 = a1 ;
const struct key_tok * p2 = a2 ;
if ( p1 -> token == p2 -> token ) return 0 ;
if ( p1 -> token < p2 -> token ) return - 1 ;
else return 1 ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | void vp9_reset_segment_features ( struct segmentation * seg ) {
seg -> enabled = 0 ;
seg -> update_map = 0 ;
seg -> update_data = 0 ;
vpx_memset ( seg -> tree_probs , 255 , sizeof ( seg -> tree_probs ) ) ;
vp9_clearall_segfeatures ( seg ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void sig_window_print_info ( WINDOW_REC * win ) {
GUI_WINDOW_REC * gui ;
gui = WINDOW_GUI ( win ) ;
if ( gui -> use_scroll ) {
printformat_window ( win , MSGLEVEL_CLIENTCRAP , TXT_WINDOW_INFO_SCROLL , gui -> scroll ? "yes" : "no" ) ;
}
if ( WINDOW_MAIN ( win ) -> sticky_windows ) windows_print_sticky ( win ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static bool tsc_adjust_needed ( void * opaque ) {
X86CPU * cpu = opaque ;
CPUX86State * env = & cpu -> env ;
return env -> tsc_adjust != 0 ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void decode_ppc ( TwinContext * tctx , int period_coef , const float * shape , float ppc_gain , float * speech ) {
const ModeTab * mtab = tctx -> mtab ;
int isampf = tctx -> avctx -> sample_rate / 1000 ;
int ibps = tctx -> avctx -> bit_rate / ( 1000 * tctx -> avctx -> channels ) ;
int min_period = ROUNDED_DIV ( 40 * 2 * mtab -> size , isampf ) ;
int max_period = ROUNDED_DIV ( 6 * 40 * 2 * mtab -> size , isampf ) ;
int period_range = max_period - min_period ;
int period = min_period + ROUNDED_DIV ( period_coef * period_range , ( 1 << mtab -> ppc_period_bit ) - 1 ) ;
int width ;
if ( isampf == 22 && ibps == 32 ) {
width = ROUNDED_DIV ( ( period + 800 ) * mtab -> peak_per2wid , 400 * mtab -> size ) ;
}
else width = ( period ) * mtab -> peak_per2wid / ( 400 * mtab -> size ) ;
add_peak ( period , width , shape , ppc_gain , speech , mtab -> ppc_shape_len ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | png_uint_32 PNGAPI png_permit_mng_features ( png_structp png_ptr , png_uint_32 mng_features ) {
png_debug ( 1 , "in png_permit_mng_features" ) ;
if ( png_ptr == NULL ) return ( png_uint_32 ) 0 ;
png_ptr -> mng_features_permitted = ( png_byte ) ( mng_features & PNG_ALL_MNG_FEATURES ) ;
return ( png_uint_32 ) png_ptr -> mng_features_permitted ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | Node * make_and_qual ( Node * qual1 , Node * qual2 ) {
if ( qual1 == NULL ) return qual2 ;
if ( qual2 == NULL ) return qual1 ;
return ( Node * ) make_andclause ( list_make2 ( qual1 , qual2 ) ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | kadm5_ret_t kadm5_get_principal_keys ( void * server_handle , krb5_principal principal , krb5_keyblock * * keyblocks , int * n_keys ) {
krb5_db_entry * kdb ;
osa_princ_ent_rec adb ;
kadm5_ret_t ret ;
kadm5_server_handle_t handle = server_handle ;
if ( keyblocks ) * keyblocks = NULL ;
CHECK_HANDLE ( server_handle ) ;
if ( principal == NULL ) return EINVAL ;
if ( ( ret = kdb_get_entry ( handle , principal , & kdb , & adb ) ) ) return ( ret ) ;
if ( keyblocks ) {
ret = decrypt_key_data ( handle -> context , kdb -> n_key_data , kdb -> key_data , keyblocks , n_keys ) ;
if ( ret ) goto done ;
}
ret = KADM5_OK ;
done : kdb_free_entry ( handle , kdb , & adb ) ;
return ret ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int __attribute_noinline__ link_exists2_p ( const char * dir , size_t dirlen , const char * fname , glob_t * pglob # ifndef _LIBC , int flags # endif ) {
size_t fnamelen = strlen ( fname ) ;
char * fullname = ( char * ) __alloca ( dirlen + 1 + fnamelen + 1 ) ;
struct stat st ;
# ifndef _LIBC struct_stat64 st64 ;
# endif mempcpy ( mempcpy ( mempcpy ( fullname , dir , dirlen ) , "/" , 1 ) , fname , fnamelen + 1 ) ;
# ifdef _LIBC return ( * pglob -> gl_stat ) ( fullname , & st ) == 0 ;
# else return ( ( __builtin_expect ( flags & GLOB_ALTDIRFUNC , 0 ) ? ( * pglob -> gl_stat ) ( fullname , & st ) : __stat64 ( fullname , & st64 ) ) == 0 ) ;
# endif } | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | void vp9_cyclic_refresh_setup ( VP9_COMP * const cpi ) {
VP9_COMMON * const cm = & cpi -> common ;
const RATE_CONTROL * const rc = & cpi -> rc ;
CYCLIC_REFRESH * const cr = cpi -> cyclic_refresh ;
struct segmentation * const seg = & cm -> seg ;
unsigned char * const seg_map = cpi -> segmentation_map ;
const int apply_cyclic_refresh = apply_cyclic_refresh_bitrate ( cm , rc ) ;
if ( ! apply_cyclic_refresh || ( cm -> frame_type == KEY_FRAME ) || ( cpi -> svc . temporal_layer_id > 0 ) ) {
vpx_memset ( seg_map , 0 , cm -> mi_rows * cm -> mi_cols ) ;
vp9_disable_segmentation ( & cm -> seg ) ;
if ( cm -> frame_type == KEY_FRAME ) cr -> sb_index = 0 ;
return ;
}
else {
int qindex_delta = 0 ;
int i , block_count , bl_index , sb_rows , sb_cols , sbs_in_frame ;
int xmis , ymis , x , y , qindex2 ;
const float rate_ratio_qdelta = 2.0 ;
const double q = vp9_convert_qindex_to_q ( cm -> base_qindex , cm -> bit_depth ) ;
vp9_clear_system_state ( ) ;
cr -> max_sbs_perframe = 10 ;
cr -> max_qdelta_perc = 50 ;
cr -> min_block_size = BLOCK_8X8 ;
cr -> time_for_refresh = 1 ;
cr -> thresh_rate_sb = ( rc -> sb64_target_rate * 256 ) >> 2 ;
cr -> thresh_dist_sb = 8 * ( int ) ( q * q ) ;
if ( cpi -> sf . use_nonrd_pick_mode ) {
cr -> thresh_rate_sb = ( rc -> sb64_target_rate * 256 ) >> 3 ;
cr -> thresh_dist_sb = 4 * ( int ) ( q * q ) ;
}
cr -> num_seg_blocks = 0 ;
vpx_memset ( seg_map , 0 , cm -> mi_rows * cm -> mi_cols ) ;
vp9_enable_segmentation ( & cm -> seg ) ;
vp9_clearall_segfeatures ( seg ) ;
seg -> abs_delta = SEGMENT_DELTADATA ;
vp9_disable_segfeature ( seg , 0 , SEG_LVL_ALT_Q ) ;
vp9_enable_segfeature ( seg , 1 , SEG_LVL_ALT_Q ) ;
qindex_delta = vp9_compute_qdelta_by_rate ( rc , cm -> frame_type , cm -> base_qindex , rate_ratio_qdelta , cm -> bit_depth ) ;
if ( - qindex_delta > cr -> max_qdelta_perc * cm -> base_qindex / 100 ) qindex_delta = - cr -> max_qdelta_perc * cm -> base_qindex / 100 ;
qindex2 = clamp ( cm -> base_qindex + cm -> y_dc_delta_q + qindex_delta , 0 , MAXQ ) ;
cr -> rdmult = vp9_compute_rd_mult ( cpi , qindex2 ) ;
vp9_set_segdata ( seg , 1 , SEG_LVL_ALT_Q , qindex_delta ) ;
sb_cols = ( cm -> mi_cols + MI_BLOCK_SIZE - 1 ) / MI_BLOCK_SIZE ;
sb_rows = ( cm -> mi_rows + MI_BLOCK_SIZE - 1 ) / MI_BLOCK_SIZE ;
sbs_in_frame = sb_cols * sb_rows ;
block_count = cr -> max_sbs_perframe * sbs_in_frame / 100 ;
assert ( cr -> sb_index < sbs_in_frame ) ;
i = cr -> sb_index ;
do {
int sum_map = 0 ;
int sb_row_index = ( i / sb_cols ) ;
int sb_col_index = i - sb_row_index * sb_cols ;
int mi_row = sb_row_index * MI_BLOCK_SIZE ;
int mi_col = sb_col_index * MI_BLOCK_SIZE ;
assert ( mi_row >= 0 && mi_row < cm -> mi_rows ) ;
assert ( mi_col >= 0 && mi_col < cm -> mi_cols ) ;
bl_index = mi_row * cm -> mi_cols + mi_col ;
xmis = MIN ( cm -> mi_cols - mi_col , num_8x8_blocks_wide_lookup [ BLOCK_64X64 ] ) ;
ymis = MIN ( cm -> mi_rows - mi_row , num_8x8_blocks_high_lookup [ BLOCK_64X64 ] ) ;
for ( y = 0 ;
y < ymis ;
y ++ ) {
for ( x = 0 ;
x < xmis ;
x ++ ) {
const int bl_index2 = bl_index + y * cm -> mi_cols + x ;
if ( cr -> map [ bl_index2 ] == 0 ) {
seg_map [ bl_index2 ] = 1 ;
sum_map ++ ;
}
else if ( cr -> map [ bl_index2 ] < 0 ) {
cr -> map [ bl_index2 ] ++ ;
}
}
}
if ( sum_map > 0 && sum_map < xmis * ymis ) {
const int new_value = ( sum_map >= xmis * ymis / 2 ) ;
for ( y = 0 ;
y < ymis ;
y ++ ) for ( x = 0 ;
x < xmis ;
x ++ ) seg_map [ bl_index + y * cm -> mi_cols + x ] = new_value ;
}
i ++ ;
if ( i == sbs_in_frame ) {
i = 0 ;
}
if ( sum_map >= xmis * ymis / 2 ) block_count -- ;
}
while ( block_count && i != cr -> sb_index ) ;
cr -> sb_index = i ;
}
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void decode_band_structure ( GetBitContext * gbc , int blk , int eac3 , int ecpl , int start_subband , int end_subband , const uint8_t * default_band_struct , int * num_bands , uint8_t * band_sizes ) {
int subbnd , bnd , n_subbands , n_bands = 0 ;
uint8_t bnd_sz [ 22 ] ;
uint8_t coded_band_struct [ 22 ] ;
const uint8_t * band_struct ;
n_subbands = end_subband - start_subband ;
if ( ! eac3 || get_bits1 ( gbc ) ) {
for ( subbnd = 0 ;
subbnd < n_subbands - 1 ;
subbnd ++ ) {
coded_band_struct [ subbnd ] = get_bits1 ( gbc ) ;
}
band_struct = coded_band_struct ;
}
else if ( ! blk ) {
band_struct = & default_band_struct [ start_subband + 1 ] ;
}
else {
return ;
}
if ( num_bands || band_sizes ) {
n_bands = n_subbands ;
bnd_sz [ 0 ] = ecpl ? 6 : 12 ;
for ( bnd = 0 , subbnd = 1 ;
subbnd < n_subbands ;
subbnd ++ ) {
int subbnd_size = ( ecpl && subbnd < 4 ) ? 6 : 12 ;
if ( band_struct [ subbnd - 1 ] ) {
n_bands -- ;
bnd_sz [ bnd ] += subbnd_size ;
}
else {
bnd_sz [ ++ bnd ] = subbnd_size ;
}
}
}
if ( num_bands ) * num_bands = n_bands ;
if ( band_sizes ) memcpy ( band_sizes , bnd_sz , n_bands ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | void vp9_rc_postencode_update_drop_frame ( VP9_COMP * cpi ) {
update_buffer_level ( cpi , 0 ) ;
cpi -> common . last_frame_type = cpi -> common . frame_type ;
cpi -> rc . frames_since_key ++ ;
cpi -> rc . frames_to_key -- ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int find_odc_header ( struct archive_read * a ) {
const void * h ;
const char * p , * q ;
size_t skip , skipped = 0 ;
ssize_t bytes ;
for ( ;
;
) {
h = __archive_read_ahead ( a , odc_header_size , & bytes ) ;
if ( h == NULL ) return ( ARCHIVE_FATAL ) ;
p = h ;
q = p + bytes ;
if ( memcmp ( "070707" , p , 6 ) == 0 && is_octal ( p , odc_header_size ) ) return ( ARCHIVE_OK ) ;
if ( memcmp ( "070727" , p , 6 ) == 0 && is_afio_large ( p , bytes ) ) {
a -> archive . archive_format = ARCHIVE_FORMAT_CPIO_AFIO_LARGE ;
return ( ARCHIVE_OK ) ;
}
while ( p + odc_header_size <= q ) {
switch ( p [ 5 ] ) {
case '7' : if ( ( memcmp ( "070707" , p , 6 ) == 0 && is_octal ( p , odc_header_size ) ) || ( memcmp ( "070727" , p , 6 ) == 0 && is_afio_large ( p , q - p ) ) ) {
skip = p - ( const char * ) h ;
__archive_read_consume ( a , skip ) ;
skipped += skip ;
if ( p [ 4 ] == '2' ) a -> archive . archive_format = ARCHIVE_FORMAT_CPIO_AFIO_LARGE ;
if ( skipped > 0 ) {
archive_set_error ( & a -> archive , 0 , "Skipped %d bytes before " "finding valid header" , ( int ) skipped ) ;
return ( ARCHIVE_WARN ) ;
}
return ( ARCHIVE_OK ) ;
}
p += 2 ;
break ;
case '0' : p ++ ;
break ;
default : p += 6 ;
break ;
}
}
skip = p - ( const char * ) h ;
__archive_read_consume ( a , skip ) ;
skipped += skip ;
}
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | void hmp_hostfwd_add ( Monitor * mon , const QDict * qdict ) {
const char * redir_str ;
SlirpState * s ;
const char * arg1 = qdict_get_str ( qdict , "arg1" ) ;
const char * arg2 = qdict_get_try_str ( qdict , "arg2" ) ;
const char * arg3 = qdict_get_try_str ( qdict , "arg3" ) ;
if ( arg2 ) {
s = slirp_lookup ( mon , arg1 , arg2 ) ;
redir_str = arg3 ;
}
else {
s = slirp_lookup ( mon , NULL , NULL ) ;
redir_str = arg1 ;
}
if ( s ) {
slirp_hostfwd ( s , redir_str , 0 ) ;
}
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static gint dissect_ac_if_input_terminal ( tvbuff_t * tvb , gint offset , packet_info * pinfo _U_ , proto_tree * tree , usb_conv_info_t * usb_conv_info _U_ ) {
gint offset_start ;
offset_start = offset ;
proto_tree_add_item ( tree , hf_ac_if_input_terminalid , tvb , offset , 1 , ENC_LITTLE_ENDIAN ) ;
offset += 1 ;
proto_tree_add_item ( tree , hf_ac_if_input_terminaltype , tvb , offset , 2 , ENC_LITTLE_ENDIAN ) ;
offset += 2 ;
proto_tree_add_item ( tree , hf_ac_if_input_assocterminal , tvb , offset , 1 , ENC_LITTLE_ENDIAN ) ;
offset += 1 ;
proto_tree_add_item ( tree , hf_ac_if_input_nrchannels , tvb , offset , 1 , ENC_LITTLE_ENDIAN ) ;
offset += 1 ;
proto_tree_add_item ( tree , hf_ac_if_input_channelconfig , tvb , offset , 2 , ENC_LITTLE_ENDIAN ) ;
offset += 2 ;
proto_tree_add_item ( tree , hf_ac_if_input_channelnames , tvb , offset , 1 , ENC_LITTLE_ENDIAN ) ;
offset += 1 ;
proto_tree_add_item ( tree , hf_ac_if_input_terminal , tvb , offset , 1 , ENC_LITTLE_ENDIAN ) ;
offset += 1 ;
return offset - offset_start ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void e1000e_set_fcrth ( E1000ECore * core , int index , uint32_t val ) {
core -> mac [ FCRTH ] = val & 0xFFF8 ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | void SortTocFromFile ( Archive * AHX ) {
ArchiveHandle * AH = ( ArchiveHandle * ) AHX ;
RestoreOptions * ropt = AH -> public . ropt ;
FILE * fh ;
char buf [ 100 ] ;
bool incomplete_line ;
ropt -> idWanted = ( bool * ) pg_malloc ( sizeof ( bool ) * AH -> maxDumpId ) ;
memset ( ropt -> idWanted , 0 , sizeof ( bool ) * AH -> maxDumpId ) ;
fh = fopen ( ropt -> tocFile , PG_BINARY_R ) ;
if ( ! fh ) exit_horribly ( modulename , "could not open TOC file \"%s\": %s\n" , ropt -> tocFile , strerror ( errno ) ) ;
incomplete_line = false ;
while ( fgets ( buf , sizeof ( buf ) , fh ) != NULL ) {
bool prev_incomplete_line = incomplete_line ;
int buflen ;
char * cmnt ;
char * endptr ;
DumpId id ;
TocEntry * te ;
buflen = strlen ( buf ) ;
if ( buflen > 0 && buf [ buflen - 1 ] == '\n' ) incomplete_line = false ;
else incomplete_line = true ;
if ( prev_incomplete_line ) continue ;
cmnt = strchr ( buf , ';' ) ;
if ( cmnt != NULL ) cmnt [ 0 ] = '\0' ;
if ( strspn ( buf , " \t\r\n" ) == strlen ( buf ) ) continue ;
id = strtol ( buf , & endptr , 10 ) ;
if ( endptr == buf || id <= 0 || id > AH -> maxDumpId || ropt -> idWanted [ id - 1 ] ) {
write_msg ( modulename , "WARNING: line ignored: %s\n" , buf ) ;
continue ;
}
te = getTocEntryByDumpId ( AH , id ) ;
if ( ! te ) exit_horribly ( modulename , "could not find entry for ID %d\n" , id ) ;
ropt -> idWanted [ id - 1 ] = true ;
_moveBefore ( AH , AH -> toc , te ) ;
}
if ( fclose ( fh ) != 0 ) exit_horribly ( modulename , "could not close TOC file: %s\n" , strerror ( errno ) ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static vpx_codec_err_t ctrl_use_reference ( vpx_codec_alg_priv_t * ctx , va_list args ) {
const int reference_flag = va_arg ( args , int ) ;
vp9_use_as_reference ( ctx -> cpi , reference_flag ) ;
return VPX_CODEC_OK ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static inline int handle_cpu_signal ( uintptr_t pc , unsigned long address , int is_write , sigset_t * old_set , void * puc ) {
int ret ;
# if defined ( DEBUG_SIGNAL ) qemu_printf ( "qemu: SIGSEGV pc=0x%08lx address=%08lx w=%d oldset=0x%08lx\n" , pc , address , is_write , * ( unsigned long * ) old_set ) ;
# endif if ( is_write && h2g_valid ( address ) && page_unprotect ( h2g ( address ) , pc , puc ) ) {
return 1 ;
}
ret = cpu_handle_mmu_fault ( cpu_single_env , address , is_write , MMU_USER_IDX ) ;
if ( ret < 0 ) {
return 0 ;
}
if ( ret == 0 ) {
return 1 ;
}
cpu_restore_state ( cpu_single_env , pc ) ;
sigprocmask ( SIG_SETMASK , old_set , NULL ) ;
exception_action ( cpu_single_env ) ;
return 1 ;
} | 1True
|
Categorize the following code snippet as vulnerable or not. True or False | static int dissect_h225_BOOLEAN ( tvbuff_t * tvb _U_ , int offset _U_ , asn1_ctx_t * actx _U_ , proto_tree * tree _U_ , int hf_index _U_ ) {
offset = dissect_per_boolean ( tvb , offset , actx , tree , hf_index , NULL ) ;
return offset ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | void vp9_init_second_pass_spatial_svc ( VP9_COMP * cpi ) {
SVC * const svc = & cpi -> svc ;
int i ;
for ( i = 0 ;
i < svc -> number_spatial_layers ;
++ i ) {
TWO_PASS * const twopass = & svc -> layer_context [ i ] . twopass ;
svc -> spatial_layer_id = i ;
vp9_init_second_pass ( cpi ) ;
twopass -> total_stats . spatial_layer_id = i ;
twopass -> total_left_stats . spatial_layer_id = i ;
}
svc -> spatial_layer_id = 0 ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static const char * rsvp_host_get_filter_type ( hostlist_talker_t * host _U_ , conv_filter_type_e filter ) {
if ( ( filter == CONV_FT_ANY_ADDRESS ) && ( host -> myaddress . type == AT_IPv4 ) ) return "ip.addr" ;
return CONV_FILTER_INVALID ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void aacsbr_func_ptr_init ( AACSBRContext * c ) {
c -> sbr_lf_gen = sbr_lf_gen ;
c -> sbr_hf_assemble = sbr_hf_assemble ;
c -> sbr_x_gen = sbr_x_gen ;
c -> sbr_hf_inverse_filter = sbr_hf_inverse_filter ;
if ( ARCH_MIPS ) ff_aacsbr_func_ptr_init_mips ( c ) ;
} | 1True
|
Categorize the following code snippet as vulnerable or not. True or False | static void SvcEncode ( VP9_COMP * cpi , size_t * size , uint8_t * dest , unsigned int * frame_flags ) {
vp9_rc_get_svc_params ( cpi ) ;
encode_frame_to_data_rate ( cpi , size , dest , frame_flags ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int dump_all_tablespaces ( ) {
return dump_tablespaces ( NULL ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | int vp8_denoiser_filter_c ( unsigned char * mc_running_avg_y , int mc_avg_y_stride , unsigned char * running_avg_y , int avg_y_stride , unsigned char * sig , int sig_stride , unsigned int motion_magnitude , int increase_denoising ) {
unsigned char * running_avg_y_start = running_avg_y ;
unsigned char * sig_start = sig ;
int sum_diff_thresh ;
int r , c ;
int sum_diff = 0 ;
int adj_val [ 3 ] = {
3 , 4 , 6 }
;
int shift_inc1 = 0 ;
int shift_inc2 = 1 ;
int col_sum [ 16 ] = {
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 }
;
if ( motion_magnitude <= MOTION_MAGNITUDE_THRESHOLD ) {
if ( increase_denoising ) {
shift_inc1 = 1 ;
shift_inc2 = 2 ;
}
adj_val [ 0 ] += shift_inc2 ;
adj_val [ 1 ] += shift_inc2 ;
adj_val [ 2 ] += shift_inc2 ;
}
for ( r = 0 ;
r < 16 ;
++ r ) {
for ( c = 0 ;
c < 16 ;
++ c ) {
int diff = 0 ;
int adjustment = 0 ;
int absdiff = 0 ;
diff = mc_running_avg_y [ c ] - sig [ c ] ;
absdiff = abs ( diff ) ;
if ( absdiff <= 3 + shift_inc1 ) {
running_avg_y [ c ] = mc_running_avg_y [ c ] ;
col_sum [ c ] += diff ;
}
else {
if ( absdiff >= 4 + shift_inc1 && absdiff <= 7 ) adjustment = adj_val [ 0 ] ;
else if ( absdiff >= 8 && absdiff <= 15 ) adjustment = adj_val [ 1 ] ;
else adjustment = adj_val [ 2 ] ;
if ( diff > 0 ) {
if ( ( sig [ c ] + adjustment ) > 255 ) running_avg_y [ c ] = 255 ;
else running_avg_y [ c ] = sig [ c ] + adjustment ;
col_sum [ c ] += adjustment ;
}
else {
if ( ( sig [ c ] - adjustment ) < 0 ) running_avg_y [ c ] = 0 ;
else running_avg_y [ c ] = sig [ c ] - adjustment ;
col_sum [ c ] -= adjustment ;
}
}
}
sig += sig_stride ;
mc_running_avg_y += mc_avg_y_stride ;
running_avg_y += avg_y_stride ;
}
for ( c = 0 ;
c < 16 ;
++ c ) {
if ( col_sum [ c ] >= 128 ) {
col_sum [ c ] = 127 ;
}
sum_diff += col_sum [ c ] ;
}
sum_diff_thresh = SUM_DIFF_THRESHOLD ;
if ( increase_denoising ) sum_diff_thresh = SUM_DIFF_THRESHOLD_HIGH ;
if ( abs ( sum_diff ) > sum_diff_thresh ) {
int delta = ( ( abs ( sum_diff ) - sum_diff_thresh ) >> 8 ) + 1 ;
if ( delta < 4 ) {
sig -= sig_stride * 16 ;
mc_running_avg_y -= mc_avg_y_stride * 16 ;
running_avg_y -= avg_y_stride * 16 ;
for ( r = 0 ;
r < 16 ;
++ r ) {
for ( c = 0 ;
c < 16 ;
++ c ) {
int diff = mc_running_avg_y [ c ] - sig [ c ] ;
int adjustment = abs ( diff ) ;
if ( adjustment > delta ) adjustment = delta ;
if ( diff > 0 ) {
if ( running_avg_y [ c ] - adjustment < 0 ) running_avg_y [ c ] = 0 ;
else running_avg_y [ c ] = running_avg_y [ c ] - adjustment ;
col_sum [ c ] -= adjustment ;
}
else if ( diff < 0 ) {
if ( running_avg_y [ c ] + adjustment > 255 ) running_avg_y [ c ] = 255 ;
else running_avg_y [ c ] = running_avg_y [ c ] + adjustment ;
col_sum [ c ] += adjustment ;
}
}
sig += sig_stride ;
mc_running_avg_y += mc_avg_y_stride ;
running_avg_y += avg_y_stride ;
}
sum_diff = 0 ;
for ( c = 0 ;
c < 16 ;
++ c ) {
if ( col_sum [ c ] >= 128 ) {
col_sum [ c ] = 127 ;
}
sum_diff += col_sum [ c ] ;
}
if ( abs ( sum_diff ) > sum_diff_thresh ) return COPY_BLOCK ;
}
else {
return COPY_BLOCK ;
}
}
vp8_copy_mem16x16 ( running_avg_y_start , avg_y_stride , sig_start , sig_stride ) ;
return FILTER_BLOCK ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | struct event_base * event_init ( void ) {
struct event_base * base = event_base_new ( ) ;
if ( base != NULL ) current_base = base ;
return ( base ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static guint32 dissect_netb_receive_continue ( tvbuff_t * tvb , packet_info * pinfo _U_ , int offset , proto_tree * tree ) {
nb_xmit_corrl ( tvb , offset , tree ) ;
nb_remote_session ( tvb , offset , tree ) ;
nb_local_session ( tvb , offset , tree ) ;
return 0 ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void lsf2lsp_3 ( AMRContext * p ) {
const uint16_t * lsf_param = p -> frame . lsf ;
int16_t lsf_r [ LP_FILTER_ORDER ] ;
float lsf_q [ LP_FILTER_ORDER ] ;
const int16_t * lsf_quantizer ;
int i , j ;
lsf_quantizer = ( p -> cur_frame_mode == MODE_7k95 ? lsf_3_1_MODE_7k95 : lsf_3_1 ) [ lsf_param [ 0 ] ] ;
memcpy ( lsf_r , lsf_quantizer , 3 * sizeof ( * lsf_r ) ) ;
lsf_quantizer = lsf_3_2 [ lsf_param [ 1 ] << ( p -> cur_frame_mode <= MODE_5k15 ) ] ;
memcpy ( lsf_r + 3 , lsf_quantizer , 3 * sizeof ( * lsf_r ) ) ;
lsf_quantizer = ( p -> cur_frame_mode <= MODE_5k15 ? lsf_3_3_MODE_5k15 : lsf_3_3 ) [ lsf_param [ 2 ] ] ;
memcpy ( lsf_r + 6 , lsf_quantizer , 4 * sizeof ( * lsf_r ) ) ;
for ( i = 0 ;
i < LP_FILTER_ORDER ;
i ++ ) lsf_q [ i ] = ( lsf_r [ i ] + p -> prev_lsf_r [ i ] * pred_fac [ i ] ) * ( LSF_R_FAC / 8000.0 ) + lsf_3_mean [ i ] * ( 1.0 / 8000.0 ) ;
ff_set_min_dist_lsf ( lsf_q , MIN_LSF_SPACING , LP_FILTER_ORDER ) ;
interpolate_lsf ( p -> lsf_q , lsf_q ) ;
memcpy ( p -> prev_lsf_r , lsf_r , LP_FILTER_ORDER * sizeof ( * lsf_r ) ) ;
ff_acelp_lsf2lspd ( p -> lsp [ 3 ] , lsf_q , LP_FILTER_ORDER ) ;
for ( i = 1 ;
i <= 3 ;
i ++ ) for ( j = 0 ;
j < LP_FILTER_ORDER ;
j ++ ) p -> lsp [ i - 1 ] [ j ] = p -> prev_lsp_sub4 [ j ] + ( p -> lsp [ 3 ] [ j ] - p -> prev_lsp_sub4 [ j ] ) * 0.25 * i ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static ASN1_STRING * obj_to_asn1derstr ( VALUE obj ) {
ASN1_STRING * a1str ;
VALUE str ;
str = ossl_to_der ( obj ) ;
if ( ! ( a1str = ASN1_STRING_new ( ) ) ) ossl_raise ( eASN1Error , NULL ) ;
ASN1_STRING_set ( a1str , RSTRING_PTR ( str ) , RSTRING_LENINT ( str ) ) ;
return a1str ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void pdf_set_shade ( fz_context * ctx , pdf_run_processor * pr , int what , fz_shade * shade ) {
pdf_gstate * gs ;
pdf_material * mat ;
gs = pdf_flush_text ( ctx , pr ) ;
mat = what == PDF_FILL ? & gs -> fill : & gs -> stroke ;
fz_drop_shade ( ctx , mat -> shade ) ;
mat -> kind = PDF_MAT_SHADE ;
mat -> shade = fz_keep_shade ( ctx , shade ) ;
mat -> gstate_num = pr -> gparent ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void _HZOpen ( UConverter * cnv , UConverterLoadArgs * pArgs , UErrorCode * errorCode ) {
UConverter * gbConverter ;
if ( pArgs -> onlyTestIsLoadable ) {
ucnv_canCreateConverter ( "GBK" , errorCode ) ;
return ;
}
gbConverter = ucnv_open ( "GBK" , errorCode ) ;
if ( U_FAILURE ( * errorCode ) ) {
return ;
}
cnv -> toUnicodeStatus = 0 ;
cnv -> fromUnicodeStatus = 0 ;
cnv -> mode = 0 ;
cnv -> fromUChar32 = 0x0000 ;
cnv -> extraInfo = uprv_calloc ( 1 , sizeof ( UConverterDataHZ ) ) ;
if ( cnv -> extraInfo != NULL ) {
( ( UConverterDataHZ * ) cnv -> extraInfo ) -> gbConverter = gbConverter ;
}
else {
ucnv_close ( gbConverter ) ;
* errorCode = U_MEMORY_ALLOCATION_ERROR ;
return ;
}
} | 1True
|
Categorize the following code snippet as vulnerable or not. True or False | static int dissect_cipsafety ( tvbuff_t * tvb , packet_info * pinfo , proto_tree * tree , void * data _U_ ) {
proto_item * ti ;
proto_tree * safety_tree ;
ti = proto_tree_add_item ( tree , proto_cipsafety , tvb , 0 , - 1 , ENC_NA ) ;
safety_tree = proto_item_add_subtree ( ti , ett_cip_safety ) ;
dissect_cip_safety_data ( safety_tree , ti , tvb , tvb_reported_length ( tvb ) , pinfo ) ;
return tvb_captured_length ( tvb ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | void vp9_remove_common ( VP9_COMMON * cm ) {
vp9_free_ref_frame_buffers ( cm ) ;
vp9_free_context_buffers ( cm ) ;
vp9_free_internal_frame_buffers ( & cm -> int_frame_buffers ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | TEST_F ( ExtensionWelcomeNotificationTest , DismissWelcomeNotification ) {
StartPreferenceSyncing ( ) ;
EXPECT_FALSE ( GetBooleanPref ( prefs : : kWelcomeNotificationDismissed ) ) ;
EXPECT_FALSE ( GetBooleanPref ( prefs : : kWelcomeNotificationDismissedLocal ) ) ;
EXPECT_FALSE ( GetBooleanPref ( prefs : : kWelcomeNotificationPreviouslyPoppedUp ) ) ;
ShowChromeNowNotification ( ) ;
message_center ( ) -> CloseCurrentNotification ( ) ;
FlushMessageLoop ( ) ;
EXPECT_EQ ( message_center ( ) -> add_notification_calls ( ) , 1 ) ;
EXPECT_EQ ( message_center ( ) -> remove_notification_calls ( ) , 1 ) ;
EXPECT_EQ ( message_center ( ) -> notifications_with_shown_as_popup ( ) , 0 ) ;
EXPECT_FALSE ( GetBooleanPref ( prefs : : kWelcomeNotificationDismissed ) ) ;
EXPECT_TRUE ( GetBooleanPref ( prefs : : kWelcomeNotificationDismissedLocal ) ) ;
EXPECT_TRUE ( GetBooleanPref ( prefs : : kWelcomeNotificationPreviouslyPoppedUp ) ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int encode_slices ( VC2EncContext * s ) {
uint8_t * buf ;
int slice_x , slice_y , skip = 0 ;
SliceArgs * enc_args = s -> slice_args ;
avpriv_align_put_bits ( & s -> pb ) ;
flush_put_bits ( & s -> pb ) ;
buf = put_bits_ptr ( & s -> pb ) ;
for ( slice_y = 0 ;
slice_y < s -> num_y ;
slice_y ++ ) {
for ( slice_x = 0 ;
slice_x < s -> num_x ;
slice_x ++ ) {
SliceArgs * args = & enc_args [ s -> num_x * slice_y + slice_x ] ;
init_put_bits ( & args -> pb , buf + skip , args -> bytes + s -> prefix_bytes ) ;
skip += args -> bytes ;
}
}
s -> avctx -> execute ( s -> avctx , encode_hq_slice , enc_args , NULL , s -> num_x * s -> num_y , sizeof ( SliceArgs ) ) ;
skip_put_bytes ( & s -> pb , skip ) ;
return 0 ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | int i2d_ ## name ( const type * a , unsigned char * * out ) ;
DECLARE_ASN1_ITEM ( name ) # define DECLARE_ASN1_NDEF_FUNCTION ( name ) int i2d_ ## name ## _NDEF ( name * a , unsigned char * * out ) ;
# define DECLARE_ASN1_FUNCTIONS_const ( name ) DECLARE_ASN1_ALLOC_FUNCTIONS ( name ) DECLARE_ASN1_ENCODE_FUNCTIONS_const ( name , name ) # define DECLARE_ASN1_ALLOC_FUNCTIONS_name ( type , name ) type * name ## _new ( void ) ;
void name ## _free ( type * a ) ;
# define DECLARE_ASN1_PRINT_FUNCTION ( stname ) DECLARE_ASN1_PRINT_FUNCTION_fname ( stname , stname ) # define DECLARE_ASN1_PRINT_FUNCTION_fname ( stname , fname ) int fname ## _print_ctx ( BIO * out , stname * x , int indent , const ASN1_PCTX * pctx ) ;
# define D2I_OF ( type ) type * ( * ) ( type * * , const unsigned char * * , long ) # define I2D_OF ( type ) int ( * ) ( type * , unsigned char * * ) # define I2D_OF_const ( type ) int ( * ) ( const type * , unsigned char * * ) # define CHECKED_D2I_OF ( type , d2i ) ( ( d2i_of_void * ) ( 1 ? d2i : ( ( D2I_OF ( type ) ) 0 ) ) ) # define CHECKED_I2D_OF ( type , i2d ) ( ( i2d_of_void * ) ( 1 ? i2d : ( ( I2D_OF ( type ) ) 0 ) ) ) # define CHECKED_NEW_OF ( type , xnew ) ( ( void * ( * ) ( void ) ) ( 1 ? xnew : ( ( type * ( * ) ( void ) ) 0 ) ) ) # define CHECKED_PTR_OF ( type , p ) ( ( void * ) ( 1 ? p : ( type * ) 0 ) ) # define CHECKED_PPTR_OF ( type , p ) ( ( void * * ) ( 1 ? p : ( type * * ) 0 ) ) # define TYPEDEF_D2I_OF ( type ) typedef type * d2i_of_ ## type ( type * * , const unsigned char * * , long ) # define TYPEDEF_I2D_OF ( type ) typedef int i2d_of_ ## type ( type * , unsigned char * * ) # define TYPEDEF_D2I2D_OF ( type ) TYPEDEF_D2I_OF ( type ) ;
TYPEDEF_I2D_OF ( type ) TYPEDEF_D2I2D_OF ( void ) ;
# ifndef OPENSSL_EXPORT_VAR_AS_FUNCTION typedef const ASN1_ITEM ASN1_ITEM_EXP ;
# define ASN1_ITEM_ptr ( iptr ) ( iptr ) # define ASN1_ITEM_ref ( iptr ) ( & ( iptr ## _it ) ) # define ASN1_ITEM_rptr ( ref ) ( & ( ref ## _it ) ) # define DECLARE_ASN1_ITEM ( name ) OPENSSL_EXTERN const ASN1_ITEM name ## _it ;
# else typedef const ASN1_ITEM * ASN1_ITEM_EXP ( void ) ;
# define ASN1_ITEM_ptr ( iptr ) ( iptr ( ) ) # define ASN1_ITEM_ref ( iptr ) ( iptr ## _it ) # define ASN1_ITEM_rptr ( ref ) ( ref ## _it ( ) ) # define DECLARE_ASN1_ITEM ( name ) const ASN1_ITEM * name ## _it ( void ) ;
# endif # define ASN1_STRFLGS_ESC_2253 1 # define ASN1_STRFLGS_ESC_CTRL 2 # define ASN1_STRFLGS_ESC_MSB 4 # define ASN1_STRFLGS_ESC_QUOTE 8 # define CHARTYPE_PRINTABLESTRING 0x10 # define CHARTYPE_FIRST_ESC_2253 0x20 # define CHARTYPE_LAST_ESC_2253 0x40 # define ASN1_STRFLGS_UTF8_CONVERT 0x10 # define ASN1_STRFLGS_IGNORE_TYPE 0x20 # define ASN1_STRFLGS_SHOW_TYPE 0x40 # define ASN1_STRFLGS_DUMP_ALL 0x80 # define ASN1_STRFLGS_DUMP_UNKNOWN 0x100 # define ASN1_STRFLGS_DUMP_DER 0x200 # define ASN1_STRFLGS_ESC_2254 0x400 # define ASN1_STRFLGS_RFC2253 ( ASN1_STRFLGS_ESC_2253 | ASN1_STRFLGS_ESC_CTRL | ASN1_STRFLGS_ESC_MSB | ASN1_STRFLGS_UTF8_CONVERT | ASN1_STRFLGS_DUMP_UNKNOWN | ASN1_STRFLGS_DUMP_DER ) DEFINE_STACK_OF ( ASN1_INTEGER ) DEFINE_STACK_OF ( ASN1_GENERALSTRING ) DEFINE_STACK_OF ( ASN1_UTF8STRING ) typedef struct asn1_type_st {
int type ;
union {
char * ptr ;
ASN1_BOOLEAN boolean ;
ASN1_STRING * asn1_string ;
ASN1_OBJECT * object ;
ASN1_INTEGER * integer ;
ASN1_ENUMERATED * enumerated ;
ASN1_BIT_STRING * bit_string ;
ASN1_OCTET_STRING * octet_string ;
ASN1_PRINTABLESTRING * printablestring ;
ASN1_T61STRING * t61string ;
ASN1_IA5STRING * ia5string ;
ASN1_GENERALSTRING * generalstring ;
ASN1_BMPSTRING * bmpstring ;
ASN1_UNIVERSALSTRING * universalstring ;
ASN1_UTCTIME * utctime ;
ASN1_GENERALIZEDTIME * generalizedtime ;
ASN1_VISIBLESTRING * visiblestring ;
ASN1_UTF8STRING * utf8string ;
ASN1_STRING * set ;
ASN1_STRING * sequence ;
ASN1_VALUE * asn1_value ;
}
value ;
}
ASN1_TYPE ;
DEFINE_STACK_OF ( ASN1_TYPE ) typedef STACK_OF ( ASN1_TYPE ) ASN1_SEQUENCE_ANY ;
DECLARE_ASN1_ENCODE_FUNCTIONS_const ( ASN1_SEQUENCE_ANY , ASN1_SEQUENCE_ANY ) DECLARE_ASN1_ENCODE_FUNCTIONS_const ( ASN1_SEQUENCE_ANY , ASN1_SET_ANY ) | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int dissect_h225_RasUsageInfoTypes ( tvbuff_t * tvb _U_ , int offset _U_ , asn1_ctx_t * actx _U_ , proto_tree * tree _U_ , int hf_index _U_ ) {
offset = dissect_per_sequence ( tvb , offset , actx , tree , hf_index , ett_h225_RasUsageInfoTypes , RasUsageInfoTypes_sequence ) ;
return offset ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | struct st_replace_regex * init_replace_regex ( char * expr ) {
struct st_replace_regex * res ;
char * buf , * expr_end ;
char * p ;
char * buf_p ;
uint expr_len = strlen ( expr ) ;
char last_c = 0 ;
struct st_regex reg ;
res = ( struct st_replace_regex * ) my_malloc ( sizeof ( * res ) + expr_len , MYF ( MY_FAE + MY_WME ) ) ;
my_init_dynamic_array ( & res -> regex_arr , sizeof ( struct st_regex ) , 128 , 128 ) ;
buf = ( char * ) res + sizeof ( * res ) ;
expr_end = expr + expr_len ;
p = expr ;
buf_p = buf ;
while ( p < expr_end ) {
bzero ( & reg , sizeof ( reg ) ) ;
while ( p < expr_end ) {
if ( * p == '/' ) break ;
p ++ ;
}
if ( p == expr_end || ++ p == expr_end ) {
if ( res -> regex_arr . elements ) break ;
else goto err ;
}
reg . pattern = buf_p ;
PARSE_REGEX_ARG if ( p == expr_end || ++ p == expr_end ) goto err ;
reg . replace = buf_p ;
PARSE_REGEX_ARG if ( p == expr_end ) goto err ;
p ++ ;
if ( p < expr_end && * p == 'i' ) reg . icase = 1 ;
if ( insert_dynamic ( & res -> regex_arr , ( uchar * ) & reg ) ) die ( "Out of memory" ) ;
}
res -> odd_buf_len = res -> even_buf_len = 8192 ;
res -> even_buf = ( char * ) my_malloc ( res -> even_buf_len , MYF ( MY_WME + MY_FAE ) ) ;
res -> odd_buf = ( char * ) my_malloc ( res -> odd_buf_len , MYF ( MY_WME + MY_FAE ) ) ;
res -> buf = res -> even_buf ;
return res ;
err : my_free ( res ) ;
die ( "Error parsing replace_regex \"%s\"" , expr ) ;
return 0 ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int decode_packet ( AVCodecContext * avctx , void * data , int * got_frame_ptr , AVPacket * avpkt ) {
WMAProDecodeCtx * s = avctx -> priv_data ;
GetBitContext * gb = & s -> pgb ;
const uint8_t * buf = avpkt -> data ;
int buf_size = avpkt -> size ;
int num_bits_prev_frame ;
int packet_sequence_number ;
* got_frame_ptr = 0 ;
if ( s -> packet_done || s -> packet_loss ) {
s -> packet_done = 0 ;
if ( buf_size < avctx -> block_align ) return 0 ;
s -> next_packet_start = buf_size - avctx -> block_align ;
buf_size = avctx -> block_align ;
s -> buf_bit_size = buf_size << 3 ;
init_get_bits ( gb , buf , s -> buf_bit_size ) ;
packet_sequence_number = get_bits ( gb , 4 ) ;
skip_bits ( gb , 2 ) ;
num_bits_prev_frame = get_bits ( gb , s -> log2_frame_size ) ;
av_dlog ( avctx , "packet[%d]: nbpf %x\n" , avctx -> frame_number , num_bits_prev_frame ) ;
if ( ! s -> packet_loss && ( ( s -> packet_sequence_number + 1 ) & 0xF ) != packet_sequence_number ) {
s -> packet_loss = 1 ;
av_log ( avctx , AV_LOG_ERROR , "Packet loss detected! seq %x vs %x\n" , s -> packet_sequence_number , packet_sequence_number ) ;
}
s -> packet_sequence_number = packet_sequence_number ;
if ( num_bits_prev_frame > 0 ) {
int remaining_packet_bits = s -> buf_bit_size - get_bits_count ( gb ) ;
if ( num_bits_prev_frame >= remaining_packet_bits ) {
num_bits_prev_frame = remaining_packet_bits ;
s -> packet_done = 1 ;
}
save_bits ( s , gb , num_bits_prev_frame , 1 ) ;
av_dlog ( avctx , "accumulated %x bits of frame data\n" , s -> num_saved_bits - s -> frame_offset ) ;
if ( ! s -> packet_loss ) decode_frame ( s , data , got_frame_ptr ) ;
}
else if ( s -> num_saved_bits - s -> frame_offset ) {
av_dlog ( avctx , "ignoring %x previously saved bits\n" , s -> num_saved_bits - s -> frame_offset ) ;
}
if ( s -> packet_loss ) {
s -> num_saved_bits = 0 ;
s -> packet_loss = 0 ;
}
}
else {
int frame_size ;
s -> buf_bit_size = ( avpkt -> size - s -> next_packet_start ) << 3 ;
init_get_bits ( gb , avpkt -> data , s -> buf_bit_size ) ;
skip_bits ( gb , s -> packet_offset ) ;
if ( s -> len_prefix && remaining_bits ( s , gb ) > s -> log2_frame_size && ( frame_size = show_bits ( gb , s -> log2_frame_size ) ) && frame_size <= remaining_bits ( s , gb ) ) {
save_bits ( s , gb , frame_size , 0 ) ;
s -> packet_done = ! decode_frame ( s , data , got_frame_ptr ) ;
}
else if ( ! s -> len_prefix && s -> num_saved_bits > get_bits_count ( & s -> gb ) ) {
s -> packet_done = ! decode_frame ( s , data , got_frame_ptr ) ;
}
else s -> packet_done = 1 ;
}
if ( s -> packet_done && ! s -> packet_loss && remaining_bits ( s , gb ) > 0 ) {
save_bits ( s , gb , remaining_bits ( s , gb ) , 0 ) ;
}
s -> packet_offset = get_bits_count ( gb ) & 7 ;
if ( s -> packet_loss ) return AVERROR_INVALIDDATA ;
return get_bits_count ( gb ) >> 3 ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | void * xmlListReverseSearch ( xmlListPtr l , void * data ) {
xmlLinkPtr lk ;
if ( l == NULL ) return ( NULL ) ;
lk = xmlListLinkReverseSearch ( l , data ) ;
if ( lk ) return ( lk -> data ) ;
return NULL ;
} | 1True
|
Categorize the following code snippet as vulnerable or not. True or False | static int dissect_h245_Cmd_aal1 ( tvbuff_t * tvb _U_ , int offset _U_ , asn1_ctx_t * actx _U_ , proto_tree * tree _U_ , int hf_index _U_ ) {
offset = dissect_per_sequence ( tvb , offset , actx , tree , hf_index , ett_h245_Cmd_aal1 , Cmd_aal1_sequence ) ;
return offset ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int roq_encode_init ( AVCodecContext * avctx ) {
RoqContext * enc = avctx -> priv_data ;
av_lfg_init ( & enc -> randctx , 1 ) ;
enc -> framesSinceKeyframe = 0 ;
if ( ( avctx -> width & 0xf ) || ( avctx -> height & 0xf ) ) {
av_log ( avctx , AV_LOG_ERROR , "Dimensions must be divisible by 16\n" ) ;
return - 1 ;
}
if ( ( ( avctx -> width ) & ( avctx -> width - 1 ) ) || ( ( avctx -> height ) & ( avctx -> height - 1 ) ) ) av_log ( avctx , AV_LOG_ERROR , "Warning: dimensions not power of two\n" ) ;
enc -> width = avctx -> width ;
enc -> height = avctx -> height ;
enc -> framesSinceKeyframe = 0 ;
enc -> first_frame = 1 ;
enc -> last_frame = av_frame_alloc ( ) ;
enc -> current_frame = av_frame_alloc ( ) ;
if ( ! enc -> last_frame || ! enc -> current_frame ) {
roq_encode_end ( avctx ) ;
return AVERROR ( ENOMEM ) ;
}
enc -> tmpData = av_malloc ( sizeof ( RoqTempdata ) ) ;
enc -> this_motion4 = av_mallocz ( ( enc -> width * enc -> height / 16 ) * sizeof ( motion_vect ) ) ;
enc -> last_motion4 = av_malloc ( ( enc -> width * enc -> height / 16 ) * sizeof ( motion_vect ) ) ;
enc -> this_motion8 = av_mallocz ( ( enc -> width * enc -> height / 64 ) * sizeof ( motion_vect ) ) ;
enc -> last_motion8 = av_malloc ( ( enc -> width * enc -> height / 64 ) * sizeof ( motion_vect ) ) ;
return 0 ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static gboolean should_get_directory_count_now ( NautilusFile * file ) {
return lacks_directory_count ( file ) && ! file -> details -> loading_directory ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void decApplyRound ( decNumber * dn , decContext * set , Int residue , uInt * status ) {
Int bump ;
if ( residue == 0 ) return ;
bump = 0 ;
switch ( set -> round ) {
case DEC_ROUND_05UP : {
Int lsd5 = * dn -> lsu % 5 ;
if ( residue < 0 && lsd5 != 1 ) bump = - 1 ;
else if ( residue > 0 && lsd5 == 0 ) bump = 1 ;
break ;
}
case DEC_ROUND_DOWN : {
if ( residue < 0 ) bump = - 1 ;
break ;
}
case DEC_ROUND_HALF_DOWN : {
if ( residue > 5 ) bump = 1 ;
break ;
}
case DEC_ROUND_HALF_EVEN : {
if ( residue > 5 ) bump = 1 ;
else if ( residue == 5 ) {
if ( * dn -> lsu & 0x01 ) bump = 1 ;
}
break ;
}
case DEC_ROUND_HALF_UP : {
if ( residue >= 5 ) bump = 1 ;
break ;
}
case DEC_ROUND_UP : {
if ( residue > 0 ) bump = 1 ;
break ;
}
case DEC_ROUND_CEILING : {
if ( decNumberIsNegative ( dn ) ) {
if ( residue < 0 ) bump = - 1 ;
}
else {
if ( residue > 0 ) bump = 1 ;
}
break ;
}
case DEC_ROUND_FLOOR : {
if ( ! decNumberIsNegative ( dn ) ) {
if ( residue < 0 ) bump = - 1 ;
}
else {
if ( residue > 0 ) bump = 1 ;
}
break ;
}
default : {
* status |= DEC_Invalid_context ;
# if DECTRACE || ( DECCHECK && DECVERB ) printf ( "Unknown rounding mode: %d\n" , set -> round ) ;
# endif break ;
}
}
if ( bump == 0 ) return ;
if ( bump > 0 ) {
Unit * up ;
uInt count = dn -> digits ;
for ( up = dn -> lsu ;
;
up ++ ) {
if ( count <= DECDPUN ) {
if ( * up != powers [ count ] - 1 ) break ;
* up = ( Unit ) powers [ count - 1 ] ;
for ( up = up - 1 ;
up >= dn -> lsu ;
up -- ) * up = 0 ;
dn -> exponent ++ ;
if ( ( dn -> exponent + dn -> digits ) > set -> emax + 1 ) {
decSetOverflow ( dn , set , status ) ;
}
return ;
}
if ( * up != DECDPUNMAX ) break ;
count -= DECDPUN ;
}
}
else {
Unit * up , * sup ;
uInt count = dn -> digits ;
for ( up = dn -> lsu ;
;
up ++ ) {
if ( count <= DECDPUN ) {
if ( * up != powers [ count - 1 ] ) break ;
sup = up ;
* up = ( Unit ) powers [ count ] - 1 ;
for ( up = up - 1 ;
up >= dn -> lsu ;
up -- ) * up = ( Unit ) powers [ DECDPUN ] - 1 ;
dn -> exponent -- ;
if ( dn -> exponent + 1 == set -> emin - set -> digits + 1 ) {
if ( count == 1 && dn -> digits == 1 ) * sup = 0 ;
else {
* sup = ( Unit ) powers [ count - 1 ] - 1 ;
dn -> digits -- ;
}
dn -> exponent ++ ;
* status |= DEC_Underflow | DEC_Subnormal | DEC_Inexact | DEC_Rounded ;
}
return ;
}
if ( * up != 0 ) break ;
count -= DECDPUN ;
}
}
decUnitAddSub ( dn -> lsu , D2U ( dn -> digits ) , uarrone , 1 , 0 , dn -> lsu , bump ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static gs_memory_type_ptr_t i_object_type ( const gs_memory_t * mem , const void * obj ) {
return ( ( const obj_header_t * ) obj - 1 ) -> o_type ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int znoaccess ( i_ctx_t * i_ctx_p ) {
os_ptr op = osp ;
check_op ( 1 ) ;
if ( r_has_type ( op , t_dictionary ) ) {
ref * aop = dict_access_ref ( op ) ;
if ( ! r_has_attrs ( aop , a_write ) ) {
if ( ! r_has_attrs ( aop , a_read ) && ! r_has_attrs ( aop , a_execute ) ) {
return 0 ;
}
return_error ( gs_error_invalidaccess ) ;
}
if ( dict_is_permanent_on_dstack ( op ) ) return_error ( gs_error_invalidaccess ) ;
}
return access_check ( i_ctx_p , 0 , true ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | int file_is_raw ( struct VpxInputContext * input ) {
uint8_t buf [ 32 ] ;
int is_raw = 0 ;
vpx_codec_stream_info_t si ;
si . sz = sizeof ( si ) ;
if ( fread ( buf , 1 , 32 , input -> file ) == 32 ) {
int i ;
if ( mem_get_le32 ( buf ) < 256 * 1024 * 1024 ) {
for ( i = 0 ;
i < get_vpx_decoder_count ( ) ;
++ i ) {
const VpxInterface * const decoder = get_vpx_decoder_by_index ( i ) ;
if ( ! vpx_codec_peek_stream_info ( decoder -> codec_interface ( ) , buf + 4 , 32 - 4 , & si ) ) {
is_raw = 1 ;
input -> fourcc = decoder -> fourcc ;
input -> width = si . w ;
input -> height = si . h ;
input -> framerate . numerator = 30 ;
input -> framerate . denominator = 1 ;
break ;
}
}
}
}
rewind ( input -> file ) ;
return is_raw ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static struct row_pairs * gtkui_connections_add ( struct conn_object * co , void * conn , struct row_pairs * * list ) {
GtkTreeIter iter ;
char flags [ 2 ] , src [ MAX_ASCII_ADDR_LEN ] , dst [ MAX_ASCII_ADDR_LEN ] ;
char proto [ 4 ] , status [ 8 ] , ccodes [ 8 ] ;
unsigned int src_port = 0 , dst_port = 0 , tx = 0 , rx = 0 ;
struct row_pairs * row = NULL ;
if ( ! list ) return ( NULL ) ;
memset ( & flags , 0 , sizeof ( flags ) ) ;
memset ( & proto , 0 , sizeof ( proto ) ) ;
memset ( & src , 0 , sizeof ( src ) ) ;
memset ( & dst , 0 , sizeof ( dst ) ) ;
memset ( & status , 0 , sizeof ( status ) ) ;
memset ( & ccodes , 0 , sizeof ( ccodes ) ) ;
conntrack_flagstr ( co , flags , sizeof ( flags ) ) ;
conntrack_statusstr ( co , status , sizeof ( status ) ) ;
conntrack_protostr ( co , proto , sizeof ( proto ) ) ;
conntrack_countrystr ( co , ccodes , sizeof ( ccodes ) ) ;
ip_addr_ntoa ( & co -> L3_addr1 , src ) ;
ip_addr_ntoa ( & co -> L3_addr2 , dst ) ;
src_port = ntohs ( co -> L4_addr1 ) ;
dst_port = ntohs ( co -> L4_addr2 ) ;
tx = co -> tx ;
rx = co -> rx ;
gtk_list_store_append ( ls_conns , & iter ) ;
gtk_list_store_set ( ls_conns , & iter , 0 , flags , 1 , src , 2 , src_port , 3 , "-" , 4 , dst , 5 , dst_port , 6 , proto , 7 , status , 8 , tx , 9 , rx , 10 , ccodes , 11 , conn , - 1 ) ;
if ( ! * list ) {
row = malloc ( sizeof ( struct row_pairs ) ) ;
if ( row == NULL ) {
USER_MSG ( "Failed create new connection row\n" ) ;
DEBUG_MSG ( "gktui_connections_add: failed to allocate memory for a new row" ) ;
}
row -> prev = NULL ;
}
else {
for ( row = * list ;
row && row -> next ;
row = row -> next ) ;
row -> next = malloc ( sizeof ( struct row_pairs ) ) ;
if ( row -> next == NULL ) {
USER_MSG ( "Failed create new connection row\n" ) ;
DEBUG_MSG ( "gktui_connections_add: failed to allocate memory for a new row" ) ;
}
row -> next -> prev = row ;
row = row -> next ;
}
row -> conn = conn ;
row -> iter = iter ;
row -> next = NULL ;
if ( ! * list ) * list = row ;
return ( row ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int mem_resize ( jas_stream_memobj_t * m , int bufsize ) {
unsigned char * buf ;
assert ( m -> buf_ ) ;
assert ( bufsize >= 0 ) ;
if ( ! ( buf = jas_realloc2 ( m -> buf_ , bufsize , sizeof ( unsigned char ) ) ) ) {
return - 1 ;
}
m -> buf_ = buf ;
m -> bufsize_ = bufsize ;
return 0 ;
} | 1True
|
Categorize the following code snippet as vulnerable or not. True or False | static void virtio_net_handle_rx ( VirtIODevice * vdev , VirtQueue * vq ) {
VirtIONet * n = to_virtio_net ( vdev ) ;
qemu_flush_queued_packets ( & n -> nic -> nc ) ;
qemu_notify_event ( ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | int x_catch_free_colors ( Display * dpy , XErrorEvent * err ) {
if ( err -> request_code == X_FreeColors || x_error_handler . orighandler == x_catch_free_colors ) return 0 ;
return x_error_handler . orighandler ( dpy , err ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void write_sync_code ( struct vp9_write_bit_buffer * wb ) {
vp9_wb_write_literal ( wb , VP9_SYNC_CODE_0 , 8 ) ;
vp9_wb_write_literal ( wb , VP9_SYNC_CODE_1 , 8 ) ;
vp9_wb_write_literal ( wb , VP9_SYNC_CODE_2 , 8 ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static bool e1000e_do_ps ( E1000ECore * core , struct NetRxPkt * pkt , size_t * hdr_len ) {
bool isip4 , isip6 , isudp , istcp ;
bool fragment ;
if ( ! e1000e_rx_use_ps_descriptor ( core ) ) {
return false ;
}
net_rx_pkt_get_protocols ( pkt , & isip4 , & isip6 , & isudp , & istcp ) ;
if ( isip4 ) {
fragment = net_rx_pkt_get_ip4_info ( pkt ) -> fragment ;
}
else if ( isip6 ) {
fragment = net_rx_pkt_get_ip6_info ( pkt ) -> fragment ;
}
else {
return false ;
}
if ( fragment && ( core -> mac [ RFCTL ] & E1000_RFCTL_IPFRSP_DIS ) ) {
return false ;
}
if ( ! fragment && ( isudp || istcp ) ) {
* hdr_len = net_rx_pkt_get_l5_hdr_offset ( pkt ) ;
}
else {
* hdr_len = net_rx_pkt_get_l4_hdr_offset ( pkt ) ;
}
if ( ( * hdr_len > core -> rxbuf_sizes [ 0 ] ) || ( * hdr_len > net_rx_pkt_get_total_len ( pkt ) ) ) {
return false ;
}
return true ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | void mime_scanner_append ( MIMEScanner * scanner , const char * data , int data_size ) {
int free_size = scanner -> m_line_size - scanner -> m_line_length ;
if ( data_size > free_size ) {
if ( scanner -> m_line_size == 0 ) {
scanner -> m_line_size = 128 ;
}
while ( free_size < data_size ) {
scanner -> m_line_size *= 2 ;
free_size = scanner -> m_line_size - scanner -> m_line_length ;
}
if ( scanner -> m_line == nullptr ) {
scanner -> m_line = ( char * ) ats_malloc ( scanner -> m_line_size ) ;
}
else {
scanner -> m_line = ( char * ) ats_realloc ( scanner -> m_line , scanner -> m_line_size ) ;
}
}
memcpy ( & ( scanner -> m_line [ scanner -> m_line_length ] ) , data , data_size ) ;
scanner -> m_line_length += data_size ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | REGRESSION_TEST ( SDK_API_TSCache ) ( RegressionTest * test , int , int * pstatus ) {
* pstatus = REGRESSION_TEST_INPROGRESS ;
SDK_Cache_test = test ;
SDK_Cache_pstatus = pstatus ;
int is_ready = 0 ;
TSCacheReady ( & is_ready ) ;
if ( ! is_ready ) {
SDK_RPRINT ( test , "TSCacheReady" , "TestCase1" , TC_FAIL , "cache is not ready" ) ;
* pstatus = REGRESSION_TEST_FAILED ;
return ;
}
else {
SDK_RPRINT ( test , "TSCacheReady" , "TestCase1" , TC_PASS , "ok" ) ;
}
char key_name [ ] = "key_for_regression_test" ;
TSCacheKey key = TSCacheKeyCreate ( ) ;
TSCacheKey key_cmp = TSCacheKeyCreate ( ) ;
SDK_RPRINT ( test , "TSCacheKeyCreate" , "TestCase1" , TC_PASS , "ok" ) ;
TSCacheKeyDigestSet ( key , key_name , strlen ( key_name ) ) ;
TSCacheKeyDigestSet ( key_cmp , key_name , strlen ( key_name ) ) ;
# if 0 if ( memcmp ( key , key_cmp , sizeof ( TSCacheKey ) ) != 0 ) {
SDK_RPRINT ( test , "TSCacheKeySetDigest" , "TestCase1" , TC_FAIL , "digest is wrong" ) ;
* pstatus = REGRESSION_TEST_FAILED ;
TSCacheKeyDestroy ( key ) ;
TSCacheKeyDestroy ( key_cmp ) ;
return ;
}
else {
SDK_RPRINT ( test , "TSCacheKeySetDigest" , "TestCase1" , TC_PASS , "ok" ) ;
TSCacheKeyDestroy ( key_cmp ) ;
}
# endif for ( int i = 0 ;
i < ( OBJECT_SIZE - 1 ) ;
i ++ ) {
content [ i ] = 'a' ;
}
content [ OBJECT_SIZE - 1 ] = '\0' ;
TSCont contp = TSContCreate ( cache_handler , TSMutexCreate ( ) ) ;
CacheVConnStruct * cache_vconn = ( CacheVConnStruct * ) TSmalloc ( sizeof ( CacheVConnStruct ) ) ;
cache_vconn -> key = key ;
TSContDataSet ( contp , cache_vconn ) ;
TSCacheWrite ( contp , key ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int dissect_h245_OCTET_STRING_SIZE_1_256 ( tvbuff_t * tvb _U_ , int offset _U_ , asn1_ctx_t * actx _U_ , proto_tree * tree _U_ , int hf_index _U_ ) {
offset = dissect_per_octet_string ( tvb , offset , actx , tree , hf_index , 1 , 256 , FALSE , NULL ) ;
return offset ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | Oid get_ordering_op_for_equality_op ( Oid opno , bool use_lhs_type ) {
Oid result = InvalidOid ;
CatCList * catlist ;
int i ;
catlist = SearchSysCacheList1 ( AMOPOPID , ObjectIdGetDatum ( opno ) ) ;
for ( i = 0 ;
i < catlist -> n_members ;
i ++ ) {
HeapTuple tuple = & catlist -> members [ i ] -> tuple ;
Form_pg_amop aform = ( Form_pg_amop ) GETSTRUCT ( tuple ) ;
if ( aform -> amopmethod != BTREE_AM_OID ) continue ;
if ( aform -> amopstrategy == BTEqualStrategyNumber ) {
Oid typid ;
typid = use_lhs_type ? aform -> amoplefttype : aform -> amoprighttype ;
result = get_opfamily_member ( aform -> amopfamily , typid , typid , BTLessStrategyNumber ) ;
if ( OidIsValid ( result ) ) break ;
}
}
ReleaseSysCacheList ( catlist ) ;
return result ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | void vp9_iht8x8_add ( TX_TYPE tx_type , const int16_t * input , uint8_t * dest , int stride , int eob ) {
if ( tx_type == DCT_DCT ) {
vp9_idct8x8_add ( input , dest , stride , eob ) ;
}
else {
vp9_iht8x8_64_add ( input , dest , stride , tx_type ) ;
}
} | 1True
|
Categorize the following code snippet as vulnerable or not. True or False | static void raw ( struct parse * pcmd , FILE * fp ) {
rawmode = 1 ;
( void ) fprintf ( fp , "Output set to raw\n" ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | TSReturnCode TSUrlClone ( TSMBuffer dest_bufp , TSMBuffer src_bufp , TSMLoc src_url , TSMLoc * locp ) {
sdk_assert ( sdk_sanity_check_mbuffer ( src_bufp ) == TS_SUCCESS ) ;
sdk_assert ( sdk_sanity_check_mbuffer ( dest_bufp ) == TS_SUCCESS ) ;
sdk_assert ( sdk_sanity_check_url_handle ( src_url ) == TS_SUCCESS ) ;
sdk_assert ( sdk_sanity_check_null_ptr ( locp ) == TS_SUCCESS ) ;
if ( ! isWriteable ( dest_bufp ) ) {
return TS_ERROR ;
}
HdrHeap * s_heap , * d_heap ;
URLImpl * s_url , * d_url ;
s_heap = ( ( HdrHeapSDKHandle * ) src_bufp ) -> m_heap ;
d_heap = ( ( HdrHeapSDKHandle * ) dest_bufp ) -> m_heap ;
s_url = ( URLImpl * ) src_url ;
d_url = url_copy ( s_url , s_heap , d_heap , ( s_heap != d_heap ) ) ;
* locp = ( TSMLoc ) d_url ;
return TS_SUCCESS ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | METHOD ( x509_t , create_subjectAltName_enumerator , enumerator_t * , private_x509_cert_t * this ) {
return this -> subjectAltNames -> create_enumerator ( this -> subjectAltNames ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void deep_count_callback ( GObject * source_object , GAsyncResult * res , gpointer user_data ) {
DeepCountState * state ;
GFileEnumerator * enumerator ;
NautilusFile * file ;
state = user_data ;
if ( state -> directory == NULL ) {
deep_count_state_free ( state ) ;
return ;
}
file = state -> directory -> details -> deep_count_file ;
enumerator = g_file_enumerate_children_finish ( G_FILE ( source_object ) , res , NULL ) ;
if ( enumerator == NULL ) {
file -> details -> deep_unreadable_count += 1 ;
deep_count_next_dir ( state ) ;
}
else {
state -> enumerator = enumerator ;
g_file_enumerator_next_files_async ( state -> enumerator , DIRECTORY_LOAD_ITEMS_PER_CALLBACK , G_PRIORITY_LOW , state -> cancellable , deep_count_more_files_callback , state ) ;
}
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int parse_CInGroupSortAggregSets ( tvbuff_t * tvb , packet_info * pinfo , int offset , proto_tree * parent_tree , proto_tree * pad_tree , const char * fmt , ... ) {
guint32 cCount , i ;
proto_item * item ;
proto_tree * tree ;
const char * txt ;
va_list ap ;
va_start ( ap , fmt ) ;
txt = wmem_strdup_vprintf ( wmem_packet_scope ( ) , fmt , ap ) ;
va_end ( ap ) ;
tree = proto_tree_add_subtree ( parent_tree , tvb , offset , 0 , ett_CInGroupSortAggregSets , & item , txt ) ;
cCount = tvb_get_letohl ( tvb , offset ) ;
proto_tree_add_uint ( tree , hf_mswsp_cingroupsortaggregsets_count , tvb , offset , 4 , cCount ) ;
offset += 4 ;
for ( i = 0 ;
i < cCount ;
i ++ ) {
offset = parse_CInGroupSortAggregSet ( tvb , pinfo , offset , tree , pad_tree , "SortSets[%u]" , i ) ;
}
proto_item_set_end ( item , tvb , offset ) ;
return offset ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int show_object_fast ( const unsigned char * sha1 , enum object_type type , int exclude , uint32_t name_hash , struct packed_git * found_pack , off_t found_offset ) {
fprintf ( stdout , "%s\n" , sha1_to_hex ( sha1 ) ) ;
return 1 ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | void key_type_put ( struct key_type * ktype ) {
up_read ( & key_types_sem ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void proto_tree_set_ether ( field_info * fi , const guint8 * value ) {
fvalue_set_bytes ( & fi -> value , value ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void jas_image_cmpt_destroy ( jas_image_cmpt_t * cmpt ) {
if ( cmpt -> stream_ ) {
jas_stream_close ( cmpt -> stream_ ) ;
}
jas_free ( cmpt ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static gint handle_message_sasl ( tvbuff_t * tvb , packet_info * pinfo , gint offset , proto_tree * message_tree ) {
gint return_value = offset ;
const sasl_cmd * command ;
command = find_sasl_command ( tvb , offset ) ;
if ( command ) {
gint newline_offset = tvb_find_guint8 ( tvb , offset + command -> length , - 1 , '\n' ) + 1 ;
if ( 0 == newline_offset ) {
if ( ( guint ) tvb_captured_length_remaining ( tvb , offset ) < MAX_SASL_PACKET_LENGTH && set_pinfo_desegment ( pinfo , offset , DESEGMENT_ONE_MORE_SEGMENT ) ) {
return_value = offset + command -> length ;
}
else {
return_value = 0 ;
}
return return_value ;
}
if ( newline_offset > 0 ) {
gint length = command -> length ;
col_add_fstr ( pinfo -> cinfo , COL_INFO , "SASL-%s" , command -> text ) ;
proto_tree_add_item ( message_tree , hf_alljoyn_sasl_command , tvb , offset , length , ENC_ASCII | ENC_NA ) ;
offset += length ;
length = newline_offset - offset ;
proto_tree_add_item ( message_tree , hf_alljoyn_sasl_parameter , tvb , offset , length , ENC_ASCII | ENC_NA ) ;
return_value = newline_offset ;
}
}
return return_value ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int64_t rm_read_dts ( AVFormatContext * s , int stream_index , int64_t * ppos , int64_t pos_limit ) {
RMDemuxContext * rm = s -> priv_data ;
int64_t pos , dts ;
int stream_index2 , flags , len , h ;
pos = * ppos ;
if ( rm -> old_format ) return AV_NOPTS_VALUE ;
if ( avio_seek ( s -> pb , pos , SEEK_SET ) < 0 ) return AV_NOPTS_VALUE ;
rm -> remaining_len = 0 ;
for ( ;
;
) {
int seq = 1 ;
AVStream * st ;
len = rm_sync ( s , & dts , & flags , & stream_index2 , & pos ) ;
if ( len < 0 ) return AV_NOPTS_VALUE ;
st = s -> streams [ stream_index2 ] ;
if ( st -> codecpar -> codec_type == AVMEDIA_TYPE_VIDEO ) {
h = avio_r8 ( s -> pb ) ;
len -- ;
if ( ! ( h & 0x40 ) ) {
seq = avio_r8 ( s -> pb ) ;
len -- ;
}
}
if ( ( flags & 2 ) && ( seq & 0x7F ) == 1 ) {
av_log ( s , AV_LOG_TRACE , "%d %d-%d %" PRId64 " %d\n" , flags , stream_index2 , stream_index , dts , seq ) ;
av_add_index_entry ( st , pos , dts , 0 , 0 , AVINDEX_KEYFRAME ) ;
if ( stream_index2 == stream_index ) break ;
}
avio_skip ( s -> pb , len ) ;
}
* ppos = pos ;
return dts ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | int jas_stream_isseekable ( jas_stream_t * stream ) {
if ( stream -> ops_ == & jas_stream_memops ) {
return 1 ;
}
else if ( stream -> ops_ == & jas_stream_fileops ) {
if ( ( * stream -> ops_ -> seek_ ) ( stream -> obj_ , 0 , SEEK_CUR ) < 0 ) {
return 0 ;
}
return 1 ;
}
else {
return 0 ;
}
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | TEST_F ( ExternalProtocolHandlerTest , TestGetBlockStateDefaultBlock ) {
ExternalProtocolHandler : : BlockState block_state = ExternalProtocolHandler : : GetBlockState ( "afp" , profile_ . get ( ) ) ;
EXPECT_EQ ( ExternalProtocolHandler : : BLOCK , block_state ) ;
EXPECT_TRUE ( local_state_ -> GetDictionary ( prefs : : kExcludedSchemes ) -> empty ( ) ) ;
EXPECT_TRUE ( profile_ -> GetPrefs ( ) -> GetDictionary ( prefs : : kExcludedSchemes ) -> empty ( ) ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void render_slice ( Vp3DecodeContext * s , int slice ) {
int x , y , i , j , fragment ;
int16_t * block = s -> block ;
int motion_x = 0xdeadbeef , motion_y = 0xdeadbeef ;
int motion_halfpel_index ;
uint8_t * motion_source ;
int plane , first_pixel ;
if ( slice >= s -> c_superblock_height ) return ;
for ( plane = 0 ;
plane < 3 ;
plane ++ ) {
uint8_t * output_plane = s -> current_frame . data [ plane ] + s -> data_offset [ plane ] ;
uint8_t * last_plane = s -> last_frame . data [ plane ] + s -> data_offset [ plane ] ;
uint8_t * golden_plane = s -> golden_frame . data [ plane ] + s -> data_offset [ plane ] ;
int stride = s -> current_frame . linesize [ plane ] ;
int plane_width = s -> width >> ( plane && s -> chroma_x_shift ) ;
int plane_height = s -> height >> ( plane && s -> chroma_y_shift ) ;
int8_t ( * motion_val ) [ 2 ] = s -> motion_val [ ! ! plane ] ;
int sb_x , sb_y = slice << ( ! plane && s -> chroma_y_shift ) ;
int slice_height = sb_y + 1 + ( ! plane && s -> chroma_y_shift ) ;
int slice_width = plane ? s -> c_superblock_width : s -> y_superblock_width ;
int fragment_width = s -> fragment_width [ ! ! plane ] ;
int fragment_height = s -> fragment_height [ ! ! plane ] ;
int fragment_start = s -> fragment_start [ plane ] ;
int do_await = ! plane && HAVE_THREADS && ( s -> avctx -> active_thread_type & FF_THREAD_FRAME ) ;
if ( ! s -> flipped_image ) stride = - stride ;
if ( CONFIG_GRAY && plane && ( s -> avctx -> flags & CODEC_FLAG_GRAY ) ) continue ;
for ( ;
sb_y < slice_height ;
sb_y ++ ) {
for ( sb_x = 0 ;
sb_x < slice_width ;
sb_x ++ ) {
for ( j = 0 ;
j < 16 ;
j ++ ) {
x = 4 * sb_x + hilbert_offset [ j ] [ 0 ] ;
y = 4 * sb_y + hilbert_offset [ j ] [ 1 ] ;
fragment = y * fragment_width + x ;
i = fragment_start + fragment ;
if ( x >= fragment_width || y >= fragment_height ) continue ;
first_pixel = 8 * y * stride + 8 * x ;
if ( do_await && s -> all_fragments [ i ] . coding_method != MODE_INTRA ) await_reference_row ( s , & s -> all_fragments [ i ] , motion_val [ fragment ] [ 1 ] , ( 16 * y ) >> s -> chroma_y_shift ) ;
if ( s -> all_fragments [ i ] . coding_method != MODE_COPY ) {
if ( ( s -> all_fragments [ i ] . coding_method == MODE_USING_GOLDEN ) || ( s -> all_fragments [ i ] . coding_method == MODE_GOLDEN_MV ) ) motion_source = golden_plane ;
else motion_source = last_plane ;
motion_source += first_pixel ;
motion_halfpel_index = 0 ;
if ( ( s -> all_fragments [ i ] . coding_method > MODE_INTRA ) && ( s -> all_fragments [ i ] . coding_method != MODE_USING_GOLDEN ) ) {
int src_x , src_y ;
motion_x = motion_val [ fragment ] [ 0 ] ;
motion_y = motion_val [ fragment ] [ 1 ] ;
src_x = ( motion_x >> 1 ) + 8 * x ;
src_y = ( motion_y >> 1 ) + 8 * y ;
motion_halfpel_index = motion_x & 0x01 ;
motion_source += ( motion_x >> 1 ) ;
motion_halfpel_index |= ( motion_y & 0x01 ) << 1 ;
motion_source += ( ( motion_y >> 1 ) * stride ) ;
if ( src_x < 0 || src_y < 0 || src_x + 9 >= plane_width || src_y + 9 >= plane_height ) {
uint8_t * temp = s -> edge_emu_buffer ;
if ( stride < 0 ) temp -= 8 * stride ;
s -> vdsp . emulated_edge_mc ( temp , motion_source , stride , 9 , 9 , src_x , src_y , plane_width , plane_height ) ;
motion_source = temp ;
}
}
if ( s -> all_fragments [ i ] . coding_method != MODE_INTRA ) {
if ( motion_halfpel_index != 3 ) {
s -> dsp . put_no_rnd_pixels_tab [ 1 ] [ motion_halfpel_index ] ( output_plane + first_pixel , motion_source , stride , 8 ) ;
}
else {
int d = ( motion_x ^ motion_y ) >> 31 ;
s -> vp3dsp . put_no_rnd_pixels_l2 ( output_plane + first_pixel , motion_source - d , motion_source + stride + 1 + d , stride , 8 ) ;
}
}
if ( s -> all_fragments [ i ] . coding_method == MODE_INTRA ) {
int index ;
index = vp3_dequant ( s , s -> all_fragments + i , plane , 0 , block ) ;
if ( index > 63 ) continue ;
s -> vp3dsp . idct_put ( output_plane + first_pixel , stride , block ) ;
}
else {
int index = vp3_dequant ( s , s -> all_fragments + i , plane , 1 , block ) ;
if ( index > 63 ) continue ;
if ( index > 0 ) {
s -> vp3dsp . idct_add ( output_plane + first_pixel , stride , block ) ;
}
else {
s -> vp3dsp . idct_dc_add ( output_plane + first_pixel , stride , block ) ;
}
}
}
else {
s -> dsp . put_pixels_tab [ 1 ] [ 0 ] ( output_plane + first_pixel , last_plane + first_pixel , stride , 8 ) ;
}
}
}
if ( ! s -> skip_loop_filter ) apply_loop_filter ( s , plane , 4 * sb_y - ! ! sb_y , FFMIN ( 4 * sb_y + 3 , fragment_height - 1 ) ) ;
}
}
vp3_draw_horiz_band ( s , FFMIN ( ( 32 << s -> chroma_y_shift ) * ( slice + 1 ) - 16 , s -> height - 16 ) ) ;
} | 1True
|
Categorize the following code snippet as vulnerable or not. True or False | static int mp_decode_layer3 ( MPADecodeContext * s ) {
int nb_granules , main_data_begin ;
int gr , ch , blocksplit_flag , i , j , k , n , bits_pos ;
GranuleDef * g ;
int16_t exponents [ 576 ] ;
if ( s -> lsf ) {
main_data_begin = get_bits ( & s -> gb , 8 ) ;
skip_bits ( & s -> gb , s -> nb_channels ) ;
nb_granules = 1 ;
}
else {
main_data_begin = get_bits ( & s -> gb , 9 ) ;
if ( s -> nb_channels == 2 ) skip_bits ( & s -> gb , 3 ) ;
else skip_bits ( & s -> gb , 5 ) ;
nb_granules = 2 ;
for ( ch = 0 ;
ch < s -> nb_channels ;
ch ++ ) {
s -> granules [ ch ] [ 0 ] . scfsi = 0 ;
s -> granules [ ch ] [ 1 ] . scfsi = get_bits ( & s -> gb , 4 ) ;
}
}
for ( gr = 0 ;
gr < nb_granules ;
gr ++ ) {
for ( ch = 0 ;
ch < s -> nb_channels ;
ch ++ ) {
av_dlog ( s -> avctx , "gr=%d ch=%d: side_info\n" , gr , ch ) ;
g = & s -> granules [ ch ] [ gr ] ;
g -> part2_3_length = get_bits ( & s -> gb , 12 ) ;
g -> big_values = get_bits ( & s -> gb , 9 ) ;
if ( g -> big_values > 288 ) {
av_log ( s -> avctx , AV_LOG_ERROR , "big_values too big\n" ) ;
return AVERROR_INVALIDDATA ;
}
g -> global_gain = get_bits ( & s -> gb , 8 ) ;
if ( ( s -> mode_ext & ( MODE_EXT_MS_STEREO | MODE_EXT_I_STEREO ) ) == MODE_EXT_MS_STEREO ) g -> global_gain -= 2 ;
if ( s -> lsf ) g -> scalefac_compress = get_bits ( & s -> gb , 9 ) ;
else g -> scalefac_compress = get_bits ( & s -> gb , 4 ) ;
blocksplit_flag = get_bits1 ( & s -> gb ) ;
if ( blocksplit_flag ) {
g -> block_type = get_bits ( & s -> gb , 2 ) ;
if ( g -> block_type == 0 ) {
av_log ( s -> avctx , AV_LOG_ERROR , "invalid block type\n" ) ;
return AVERROR_INVALIDDATA ;
}
g -> switch_point = get_bits1 ( & s -> gb ) ;
for ( i = 0 ;
i < 2 ;
i ++ ) g -> table_select [ i ] = get_bits ( & s -> gb , 5 ) ;
for ( i = 0 ;
i < 3 ;
i ++ ) g -> subblock_gain [ i ] = get_bits ( & s -> gb , 3 ) ;
ff_init_short_region ( s , g ) ;
}
else {
int region_address1 , region_address2 ;
g -> block_type = 0 ;
g -> switch_point = 0 ;
for ( i = 0 ;
i < 3 ;
i ++ ) g -> table_select [ i ] = get_bits ( & s -> gb , 5 ) ;
region_address1 = get_bits ( & s -> gb , 4 ) ;
region_address2 = get_bits ( & s -> gb , 3 ) ;
av_dlog ( s -> avctx , "region1=%d region2=%d\n" , region_address1 , region_address2 ) ;
ff_init_long_region ( s , g , region_address1 , region_address2 ) ;
}
ff_region_offset2size ( g ) ;
ff_compute_band_indexes ( s , g ) ;
g -> preflag = 0 ;
if ( ! s -> lsf ) g -> preflag = get_bits1 ( & s -> gb ) ;
g -> scalefac_scale = get_bits1 ( & s -> gb ) ;
g -> count1table_select = get_bits1 ( & s -> gb ) ;
av_dlog ( s -> avctx , "block_type=%d switch_point=%d\n" , g -> block_type , g -> switch_point ) ;
}
}
if ( ! s -> adu_mode ) {
int skip ;
const uint8_t * ptr = s -> gb . buffer + ( get_bits_count ( & s -> gb ) >> 3 ) ;
int extrasize = av_clip ( get_bits_left ( & s -> gb ) >> 3 , 0 , FFMAX ( 0 , LAST_BUF_SIZE - s -> last_buf_size ) ) ;
assert ( ( get_bits_count ( & s -> gb ) & 7 ) == 0 ) ;
av_dlog ( s -> avctx , "seekback:%d, lastbuf:%d\n" , main_data_begin , s -> last_buf_size ) ;
memcpy ( s -> last_buf + s -> last_buf_size , ptr , extrasize ) ;
s -> in_gb = s -> gb ;
init_get_bits ( & s -> gb , s -> last_buf , s -> last_buf_size * 8 ) ;
# if ! UNCHECKED_BITSTREAM_READER s -> gb . size_in_bits_plus8 += extrasize * 8 ;
# endif s -> last_buf_size <<= 3 ;
for ( gr = 0 ;
gr < nb_granules && ( s -> last_buf_size >> 3 ) < main_data_begin ;
gr ++ ) {
for ( ch = 0 ;
ch < s -> nb_channels ;
ch ++ ) {
g = & s -> granules [ ch ] [ gr ] ;
s -> last_buf_size += g -> part2_3_length ;
memset ( g -> sb_hybrid , 0 , sizeof ( g -> sb_hybrid ) ) ;
compute_imdct ( s , g , & s -> sb_samples [ ch ] [ 18 * gr ] [ 0 ] , s -> mdct_buf [ ch ] ) ;
}
}
skip = s -> last_buf_size - 8 * main_data_begin ;
if ( skip >= s -> gb . size_in_bits && s -> in_gb . buffer ) {
skip_bits_long ( & s -> in_gb , skip - s -> gb . size_in_bits ) ;
s -> gb = s -> in_gb ;
s -> in_gb . buffer = NULL ;
}
else {
skip_bits_long ( & s -> gb , skip ) ;
}
}
else {
gr = 0 ;
}
for ( ;
gr < nb_granules ;
gr ++ ) {
for ( ch = 0 ;
ch < s -> nb_channels ;
ch ++ ) {
g = & s -> granules [ ch ] [ gr ] ;
bits_pos = get_bits_count ( & s -> gb ) ;
if ( ! s -> lsf ) {
uint8_t * sc ;
int slen , slen1 , slen2 ;
slen1 = slen_table [ 0 ] [ g -> scalefac_compress ] ;
slen2 = slen_table [ 1 ] [ g -> scalefac_compress ] ;
av_dlog ( s -> avctx , "slen1=%d slen2=%d\n" , slen1 , slen2 ) ;
if ( g -> block_type == 2 ) {
n = g -> switch_point ? 17 : 18 ;
j = 0 ;
if ( slen1 ) {
for ( i = 0 ;
i < n ;
i ++ ) g -> scale_factors [ j ++ ] = get_bits ( & s -> gb , slen1 ) ;
}
else {
for ( i = 0 ;
i < n ;
i ++ ) g -> scale_factors [ j ++ ] = 0 ;
}
if ( slen2 ) {
for ( i = 0 ;
i < 18 ;
i ++ ) g -> scale_factors [ j ++ ] = get_bits ( & s -> gb , slen2 ) ;
for ( i = 0 ;
i < 3 ;
i ++ ) g -> scale_factors [ j ++ ] = 0 ;
}
else {
for ( i = 0 ;
i < 21 ;
i ++ ) g -> scale_factors [ j ++ ] = 0 ;
}
}
else {
sc = s -> granules [ ch ] [ 0 ] . scale_factors ;
j = 0 ;
for ( k = 0 ;
k < 4 ;
k ++ ) {
n = k == 0 ? 6 : 5 ;
if ( ( g -> scfsi & ( 0x8 >> k ) ) == 0 ) {
slen = ( k < 2 ) ? slen1 : slen2 ;
if ( slen ) {
for ( i = 0 ;
i < n ;
i ++ ) g -> scale_factors [ j ++ ] = get_bits ( & s -> gb , slen ) ;
}
else {
for ( i = 0 ;
i < n ;
i ++ ) g -> scale_factors [ j ++ ] = 0 ;
}
}
else {
for ( i = 0 ;
i < n ;
i ++ ) {
g -> scale_factors [ j ] = sc [ j ] ;
j ++ ;
}
}
}
g -> scale_factors [ j ++ ] = 0 ;
}
}
else {
int tindex , tindex2 , slen [ 4 ] , sl , sf ;
if ( g -> block_type == 2 ) tindex = g -> switch_point ? 2 : 1 ;
else tindex = 0 ;
sf = g -> scalefac_compress ;
if ( ( s -> mode_ext & MODE_EXT_I_STEREO ) && ch == 1 ) {
sf >>= 1 ;
if ( sf < 180 ) {
lsf_sf_expand ( slen , sf , 6 , 6 , 0 ) ;
tindex2 = 3 ;
}
else if ( sf < 244 ) {
lsf_sf_expand ( slen , sf - 180 , 4 , 4 , 0 ) ;
tindex2 = 4 ;
}
else {
lsf_sf_expand ( slen , sf - 244 , 3 , 0 , 0 ) ;
tindex2 = 5 ;
}
}
else {
if ( sf < 400 ) {
lsf_sf_expand ( slen , sf , 5 , 4 , 4 ) ;
tindex2 = 0 ;
}
else if ( sf < 500 ) {
lsf_sf_expand ( slen , sf - 400 , 5 , 4 , 0 ) ;
tindex2 = 1 ;
}
else {
lsf_sf_expand ( slen , sf - 500 , 3 , 0 , 0 ) ;
tindex2 = 2 ;
g -> preflag = 1 ;
}
}
j = 0 ;
for ( k = 0 ;
k < 4 ;
k ++ ) {
n = lsf_nsf_table [ tindex2 ] [ tindex ] [ k ] ;
sl = slen [ k ] ;
if ( sl ) {
for ( i = 0 ;
i < n ;
i ++ ) g -> scale_factors [ j ++ ] = get_bits ( & s -> gb , sl ) ;
}
else {
for ( i = 0 ;
i < n ;
i ++ ) g -> scale_factors [ j ++ ] = 0 ;
}
}
for ( ;
j < 40 ;
j ++ ) g -> scale_factors [ j ] = 0 ;
}
exponents_from_scale_factors ( s , g , exponents ) ;
huffman_decode ( s , g , exponents , bits_pos + g -> part2_3_length ) ;
}
if ( s -> mode == MPA_JSTEREO ) compute_stereo ( s , & s -> granules [ 0 ] [ gr ] , & s -> granules [ 1 ] [ gr ] ) ;
for ( ch = 0 ;
ch < s -> nb_channels ;
ch ++ ) {
g = & s -> granules [ ch ] [ gr ] ;
reorder_block ( s , g ) ;
compute_antialias ( s , g ) ;
compute_imdct ( s , g , & s -> sb_samples [ ch ] [ 18 * gr ] [ 0 ] , s -> mdct_buf [ ch ] ) ;
}
}
if ( get_bits_count ( & s -> gb ) < 0 ) skip_bits_long ( & s -> gb , - get_bits_count ( & s -> gb ) ) ;
return nb_granules * 18 ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void write_mb_modes_kf ( const VP9_COMMON * cm , const MACROBLOCKD * xd , MODE_INFO * * mi_8x8 , vp9_writer * w ) {
const struct segmentation * const seg = & cm -> seg ;
const MODE_INFO * const mi = mi_8x8 [ 0 ] ;
const MODE_INFO * const above_mi = mi_8x8 [ - xd -> mi_stride ] ;
const MODE_INFO * const left_mi = xd -> left_available ? mi_8x8 [ - 1 ] : NULL ;
const MB_MODE_INFO * const mbmi = & mi -> mbmi ;
const BLOCK_SIZE bsize = mbmi -> sb_type ;
if ( seg -> update_map ) write_segment_id ( w , seg , mbmi -> segment_id ) ;
write_skip ( cm , xd , mbmi -> segment_id , mi , w ) ;
if ( bsize >= BLOCK_8X8 && cm -> tx_mode == TX_MODE_SELECT ) write_selected_tx_size ( cm , xd , mbmi -> tx_size , bsize , w ) ;
if ( bsize >= BLOCK_8X8 ) {
write_intra_mode ( w , mbmi -> mode , get_y_mode_probs ( mi , above_mi , left_mi , 0 ) ) ;
}
else {
const int num_4x4_w = num_4x4_blocks_wide_lookup [ bsize ] ;
const int num_4x4_h = num_4x4_blocks_high_lookup [ bsize ] ;
int idx , idy ;
for ( idy = 0 ;
idy < 2 ;
idy += num_4x4_h ) {
for ( idx = 0 ;
idx < 2 ;
idx += num_4x4_w ) {
const int block = idy * 2 + idx ;
write_intra_mode ( w , mi -> bmi [ block ] . as_mode , get_y_mode_probs ( mi , above_mi , left_mi , block ) ) ;
}
}
}
write_intra_mode ( w , mbmi -> uv_mode , vp9_kf_uv_mode_prob [ mbmi -> mode ] ) ;
} | 1True
|
Categorize the following code snippet as vulnerable or not. True or False | int xmlHashAddEntry ( xmlHashTablePtr table , const xmlChar * name , void * userdata ) {
return ( xmlHashAddEntry3 ( table , name , NULL , NULL , userdata ) ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void loopfilter_frame ( VP9_COMP * cpi , VP9_COMMON * cm ) {
MACROBLOCKD * xd = & cpi -> mb . e_mbd ;
struct loopfilter * lf = & cm -> lf ;
if ( xd -> lossless ) {
lf -> filter_level = 0 ;
}
else {
struct vpx_usec_timer timer ;
vp9_clear_system_state ( ) ;
vpx_usec_timer_start ( & timer ) ;
vp9_pick_filter_level ( cpi -> Source , cpi , cpi -> sf . lpf_pick ) ;
vpx_usec_timer_mark ( & timer ) ;
cpi -> time_pick_lpf += vpx_usec_timer_elapsed ( & timer ) ;
}
if ( lf -> filter_level > 0 ) {
vp9_loop_filter_frame ( cm -> frame_to_show , cm , xd , lf -> filter_level , 0 , 0 ) ;
}
vp9_extend_frame_inner_borders ( cm -> frame_to_show ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int dissect_h245_INTEGER_1_16 ( tvbuff_t * tvb _U_ , int offset _U_ , asn1_ctx_t * actx _U_ , proto_tree * tree _U_ , int hf_index _U_ ) {
offset = dissect_per_constrained_integer ( tvb , offset , actx , tree , hf_index , 1U , 16U , NULL , FALSE ) ;
return offset ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static void process_rtp_payload ( tvbuff_t * newtvb , packet_info * pinfo , proto_tree * tree , proto_tree * rtp_tree , unsigned int payload_type ) {
struct _rtp_conversation_info * p_conv_data = NULL ;
gboolean found_match = FALSE ;
int payload_len ;
struct srtp_info * srtp_info ;
int offset = 0 ;
payload_len = tvb_length_remaining ( newtvb , offset ) ;
p_conv_data = ( struct _rtp_conversation_info * ) p_get_proto_data ( wmem_file_scope ( ) , pinfo , proto_rtp , 0 ) ;
if ( p_conv_data && p_conv_data -> srtp_info ) {
srtp_info = p_conv_data -> srtp_info ;
payload_len -= srtp_info -> mki_len + srtp_info -> auth_tag_len ;
# if 0 # error Currently the srtp_info structure contains no cipher data , see packet - sdp . c adding dummy_srtp_info structure if ( p_conv_data -> srtp_info -> encryption_algorithm == SRTP_ENC_ALG_NULL ) {
if ( rtp_tree ) proto_tree_add_text ( rtp_tree , newtvb , offset , payload_len , "SRTP Payload with NULL encryption" ) ;
}
else # endif {
if ( rtp_tree ) proto_tree_add_item ( rtp_tree , hf_srtp_encrypted_payload , newtvb , offset , payload_len , ENC_NA ) ;
found_match = TRUE ;
}
offset += payload_len ;
if ( srtp_info -> mki_len ) {
proto_tree_add_item ( rtp_tree , hf_srtp_mki , newtvb , offset , srtp_info -> mki_len , ENC_NA ) ;
offset += srtp_info -> mki_len ;
}
if ( srtp_info -> auth_tag_len ) {
proto_tree_add_item ( rtp_tree , hf_srtp_auth_tag , newtvb , offset , srtp_info -> auth_tag_len , ENC_NA ) ;
}
}
else if ( p_conv_data && ! p_conv_data -> bta2dp_info && ! p_conv_data -> btvdp_info && payload_type >= PT_UNDF_96 && payload_type <= PT_UNDF_127 ) {
if ( p_conv_data && p_conv_data -> rtp_dyn_payload ) {
gchar * payload_type_str = NULL ;
encoding_name_and_rate_t * encoding_name_and_rate_pt = NULL ;
encoding_name_and_rate_pt = ( encoding_name_and_rate_t * ) g_hash_table_lookup ( p_conv_data -> rtp_dyn_payload , & payload_type ) ;
if ( encoding_name_and_rate_pt ) {
payload_type_str = encoding_name_and_rate_pt -> encoding_name ;
}
if ( payload_type_str ) {
found_match = dissector_try_string ( rtp_dyn_pt_dissector_table , payload_type_str , newtvb , pinfo , tree , NULL ) ;
if ( found_match == FALSE ) proto_tree_add_item ( rtp_tree , hf_rtp_data , newtvb , 0 , - 1 , ENC_NA ) ;
return ;
}
}
}
else if ( p_conv_data && p_conv_data -> bta2dp_info ) {
tvbuff_t * nexttvb ;
gint suboffset = 0 ;
found_match = TRUE ;
if ( p_conv_data -> bta2dp_info -> content_protection_type == BTAVDTP_CONTENT_PROTECTION_TYPE_SCMS_T ) {
nexttvb = tvb_new_subset ( newtvb , 0 , 1 , 1 ) ;
call_dissector ( bta2dp_content_protection_header_scms_t , nexttvb , pinfo , tree ) ;
suboffset = 1 ;
}
nexttvb = tvb_new_subset_remaining ( newtvb , suboffset ) ;
if ( p_conv_data -> bta2dp_info -> codec_dissector ) call_dissector ( p_conv_data -> bta2dp_info -> codec_dissector , nexttvb , pinfo , tree ) ;
else call_dissector ( data_handle , nexttvb , pinfo , tree ) ;
}
else if ( p_conv_data && p_conv_data -> btvdp_info ) {
tvbuff_t * nexttvb ;
gint suboffset = 0 ;
found_match = TRUE ;
if ( p_conv_data -> btvdp_info -> content_protection_type == BTAVDTP_CONTENT_PROTECTION_TYPE_SCMS_T ) {
nexttvb = tvb_new_subset ( newtvb , 0 , 1 , 1 ) ;
call_dissector ( bta2dp_content_protection_header_scms_t , nexttvb , pinfo , tree ) ;
suboffset = 1 ;
}
nexttvb = tvb_new_subset_remaining ( newtvb , suboffset ) ;
if ( p_conv_data -> btvdp_info -> codec_dissector ) call_dissector ( p_conv_data -> btvdp_info -> codec_dissector , nexttvb , pinfo , tree ) ;
else call_dissector ( data_handle , nexttvb , pinfo , tree ) ;
}
if ( ! found_match && ! dissector_try_uint ( rtp_pt_dissector_table , payload_type , newtvb , pinfo , tree ) ) proto_tree_add_item ( rtp_tree , hf_rtp_data , newtvb , 0 , - 1 , ENC_NA ) ;
} | 1True
|
Categorize the following code snippet as vulnerable or not. True or False | static cmsBool WriteOffsetArray ( cmsIOHANDLER * io , _cmsDICarray * a , cmsUInt32Number Count , cmsUInt32Number Length ) {
cmsUInt32Number i ;
for ( i = 0 ;
i < Count ;
i ++ ) {
if ( ! WriteOneElem ( io , & a -> Name , i ) ) return FALSE ;
if ( ! WriteOneElem ( io , & a -> Value , i ) ) return FALSE ;
if ( Length > 16 ) {
if ( ! WriteOneElem ( io , & a -> DisplayName , i ) ) return FALSE ;
}
if ( Length > 24 ) {
if ( ! WriteOneElem ( io , & a -> DisplayValue , i ) ) return FALSE ;
}
}
return TRUE ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | TSReturnCode TSMimeHdrFieldClone ( TSMBuffer dest_bufp , TSMLoc dest_hdr , TSMBuffer src_bufp , TSMLoc src_hdr , TSMLoc src_field , TSMLoc * locp ) {
sdk_assert ( sdk_sanity_check_mbuffer ( dest_bufp ) == TS_SUCCESS ) ;
sdk_assert ( sdk_sanity_check_mbuffer ( src_bufp ) == TS_SUCCESS ) ;
sdk_assert ( ( sdk_sanity_check_mime_hdr_handle ( dest_hdr ) == TS_SUCCESS ) || ( sdk_sanity_check_http_hdr_handle ( dest_hdr ) == TS_SUCCESS ) ) ;
sdk_assert ( ( sdk_sanity_check_mime_hdr_handle ( src_hdr ) == TS_SUCCESS ) || ( sdk_sanity_check_http_hdr_handle ( src_hdr ) == TS_SUCCESS ) ) ;
sdk_assert ( sdk_sanity_check_field_handle ( src_field , src_hdr ) == TS_SUCCESS ) ;
sdk_assert ( sdk_sanity_check_null_ptr ( ( void * ) locp ) == TS_SUCCESS ) ;
if ( ! isWriteable ( dest_bufp ) ) {
return TS_ERROR ;
}
if ( TSMimeHdrFieldCreate ( dest_bufp , dest_hdr , locp ) == TS_SUCCESS ) {
TSMimeHdrFieldCopy ( dest_bufp , dest_hdr , * locp , src_bufp , src_hdr , src_field ) ;
return TS_SUCCESS ;
}
return TS_ERROR ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int validate_raw_date ( const char * src , struct strbuf * result ) {
const char * orig_src = src ;
char * endp ;
unsigned long num ;
errno = 0 ;
num = strtoul ( src , & endp , 10 ) ;
if ( errno || endp == src || * endp != ' ' ) return - 1 ;
src = endp + 1 ;
if ( * src != '-' && * src != '+' ) return - 1 ;
num = strtoul ( src + 1 , & endp , 10 ) ;
if ( errno || endp == src + 1 || * endp || 1400 < num ) return - 1 ;
strbuf_addstr ( result , orig_src ) ;
return 0 ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int next_code ( struct archive_read_filter * self ) {
struct private_data * state = ( struct private_data * ) self -> data ;
int code , newcode ;
static int debug_buff [ 1024 ] ;
static unsigned debug_index ;
code = newcode = getbits ( self , state -> bits ) ;
if ( code < 0 ) return ( code ) ;
debug_buff [ debug_index ++ ] = code ;
if ( debug_index >= sizeof ( debug_buff ) / sizeof ( debug_buff [ 0 ] ) ) debug_index = 0 ;
if ( ( code == 256 ) && state -> use_reset_code ) {
int skip_bytes = state -> bits - ( state -> bytes_in_section % state -> bits ) ;
skip_bytes %= state -> bits ;
state -> bits_avail = 0 ;
while ( skip_bytes -- > 0 ) {
code = getbits ( self , 8 ) ;
if ( code < 0 ) return ( code ) ;
}
state -> bytes_in_section = 0 ;
state -> bits = 9 ;
state -> section_end_code = ( 1 << state -> bits ) - 1 ;
state -> free_ent = 257 ;
state -> oldcode = - 1 ;
return ( next_code ( self ) ) ;
}
if ( code > state -> free_ent || ( code == state -> free_ent && state -> oldcode < 0 ) ) {
archive_set_error ( & ( self -> archive -> archive ) , - 1 , "Invalid compressed data" ) ;
return ( ARCHIVE_FATAL ) ;
}
if ( code >= state -> free_ent ) {
* state -> stackp ++ = state -> finbyte ;
code = state -> oldcode ;
}
while ( code >= 256 ) {
* state -> stackp ++ = state -> suffix [ code ] ;
code = state -> prefix [ code ] ;
}
* state -> stackp ++ = state -> finbyte = code ;
code = state -> free_ent ;
if ( code < state -> maxcode && state -> oldcode >= 0 ) {
state -> prefix [ code ] = state -> oldcode ;
state -> suffix [ code ] = state -> finbyte ;
++ state -> free_ent ;
}
if ( state -> free_ent > state -> section_end_code ) {
state -> bits ++ ;
state -> bytes_in_section = 0 ;
if ( state -> bits == state -> maxcode_bits ) state -> section_end_code = state -> maxcode ;
else state -> section_end_code = ( 1 << state -> bits ) - 1 ;
}
state -> oldcode = newcode ;
return ( ARCHIVE_OK ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static int sort_key_read ( MI_SORT_PARAM * sort_param , void * key ) {
int error ;
SORT_INFO * sort_info = sort_param -> sort_info ;
MI_INFO * info = sort_info -> info ;
DBUG_ENTER ( "sort_key_read" ) ;
if ( ( error = sort_get_next_record ( sort_param ) ) ) DBUG_RETURN ( error ) ;
if ( info -> state -> records == sort_info -> max_records ) {
mi_check_print_error ( sort_info -> param , "Key %d - Found too many records;
Can't continue" , sort_param -> key + 1 ) ;
DBUG_RETURN ( 1 ) ;
}
sort_param -> real_key_length = ( info -> s -> rec_reflength + _mi_make_key ( info , sort_param -> key , ( uchar * ) key , sort_param -> record , sort_param -> filepos ) ) ;
# ifdef HAVE_purify bzero ( key + sort_param -> real_key_length , ( sort_param -> key_length - sort_param -> real_key_length ) ) ;
# endif DBUG_RETURN ( sort_write_record ( sort_param ) ) ;
} | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static ossl_inline STACK_OF ( t1 ) * sk_ ## t1 ## _new_reserve ( sk_ ## t1 ## _compfunc compare , int n ) {
return ( STACK_OF ( t1 ) * ) OPENSSL_sk_new_reserve ( ( OPENSSL_sk_compfunc ) compare , n ) ;
}
static ossl_inline int sk_ ## t1 ## _reserve ( STACK_OF ( t1 ) * sk , int n ) {
return OPENSSL_sk_reserve ( ( OPENSSL_STACK * ) sk , n ) ;
}
static ossl_inline void sk_ ## t1 ## _free ( STACK_OF ( t1 ) * sk ) {
OPENSSL_sk_free ( ( OPENSSL_STACK * ) sk ) ;
}
static ossl_inline void sk_ ## t1 ## _zero ( STACK_OF ( t1 ) * sk ) {
OPENSSL_sk_zero ( ( OPENSSL_STACK * ) sk ) ;
}
static ossl_inline t2 * sk_ ## t1 ## _delete ( STACK_OF ( t1 ) * sk , int i ) {
return ( t2 * ) OPENSSL_sk_delete ( ( OPENSSL_STACK * ) sk , i ) ;
}
static ossl_inline t2 * sk_ ## t1 ## _delete_ptr ( STACK_OF ( t1 ) * sk , t2 * ptr ) {
return ( t2 * ) OPENSSL_sk_delete_ptr ( ( OPENSSL_STACK * ) sk , ( const void * ) ptr ) ;
}
static ossl_inline int sk_ ## t1 ## _push ( STACK_OF ( t1 ) * sk , t2 * ptr ) {
return OPENSSL_sk_push ( ( OPENSSL_STACK * ) sk , ( const void * ) ptr ) ;
}
static ossl_inline int sk_ ## t1 ## _unshift ( STACK_OF ( t1 ) * sk , t2 * ptr ) {
return OPENSSL_sk_unshift ( ( OPENSSL_STACK * ) sk , ( const void * ) ptr ) ;
}
static ossl_inline t2 * sk_ ## t1 ## _pop ( STACK_OF ( t1 ) * sk ) {
return ( t2 * ) OPENSSL_sk_pop ( ( OPENSSL_STACK * ) sk ) ;
}
static ossl_inline t2 * sk_ ## t1 ## _shift ( STACK_OF ( t1 ) * sk ) {
return ( t2 * ) OPENSSL_sk_shift ( ( OPENSSL_STACK * ) sk ) ;
}
static ossl_inline void sk_ ## t1 ## _pop_free ( STACK_OF ( t1 ) * sk , sk_ ## t1 ## _freefunc freefunc ) {
OPENSSL_sk_pop_free ( ( OPENSSL_STACK * ) sk , ( OPENSSL_sk_freefunc ) freefunc ) ;
}
static ossl_inline int sk_ ## t1 ## _insert ( STACK_OF ( t1 ) * sk , t2 * ptr , int idx ) {
return OPENSSL_sk_insert ( ( OPENSSL_STACK * ) sk , ( const void * ) ptr , idx ) ;
}
static ossl_inline t2 * sk_ ## t1 ## _set ( STACK_OF ( t1 ) * sk , int idx , t2 * ptr ) {
return ( t2 * ) OPENSSL_sk_set ( ( OPENSSL_STACK * ) sk , idx , ( const void * ) ptr ) ;
}
static ossl_inline int sk_ ## t1 ## _find ( STACK_OF ( t1 ) * sk , t2 * ptr ) {
return OPENSSL_sk_find ( ( OPENSSL_STACK * ) sk , ( const void * ) ptr ) ;
}
static ossl_inline int sk_ ## t1 ## _find_ex ( STACK_OF ( t1 ) * sk , t2 * ptr ) {
return OPENSSL_sk_find_ex ( ( OPENSSL_STACK * ) sk , ( const void * ) ptr ) ;
}
static ossl_inline void sk_ ## t1 ## _sort ( STACK_OF ( t1 ) * sk ) {
OPENSSL_sk_sort ( ( OPENSSL_STACK * ) sk ) ;
}
static ossl_inline int sk_ ## t1 ## _is_sorted ( const STACK_OF ( t1 ) * sk ) {
return OPENSSL_sk_is_sorted ( ( const OPENSSL_STACK * ) sk ) ;
}
static ossl_inline STACK_OF ( t1 ) * sk_ ## t1 ## _dup ( const STACK_OF ( t1 ) * sk ) {
return ( STACK_OF ( t1 ) * ) OPENSSL_sk_dup ( ( const OPENSSL_STACK * ) sk ) ;
}
static ossl_inline STACK_OF ( t1 ) * sk_ ## t1 ## _deep_copy ( const STACK_OF ( t1 ) * sk , sk_ ## t1 ## _copyfunc copyfunc , sk_ ## t1 ## _freefunc freefunc ) {
return ( STACK_OF ( t1 ) * ) OPENSSL_sk_deep_copy ( ( const OPENSSL_STACK * ) sk , ( OPENSSL_sk_copyfunc ) copyfunc , ( OPENSSL_sk_freefunc ) freefunc ) ;
}
static ossl_inline sk_ ## t1 ## _compfunc sk_ ## t1 ## _set_cmp_func ( STACK_OF ( t1 ) * sk , sk_ ## t1 ## _compfunc compare ) {
return ( sk_ ## t1 ## _compfunc ) OPENSSL_sk_set_cmp_func ( ( OPENSSL_STACK * ) sk , ( OPENSSL_sk_compfunc ) compare ) ;
}
# define DEFINE_SPECIAL_STACK_OF ( t1 , t2 ) SKM_DEFINE_STACK_OF ( t1 , t2 , t2 ) # define DEFINE_STACK_OF ( t ) SKM_DEFINE_STACK_OF ( t , t , t ) # define DEFINE_SPECIAL_STACK_OF_CONST ( t1 , t2 ) SKM_DEFINE_STACK_OF ( t1 , const t2 , t2 ) # define DEFINE_STACK_OF_CONST ( t ) SKM_DEFINE_STACK_OF ( t , const t , t ) typedef char * OPENSSL_STRING ;
typedef const char * OPENSSL_CSTRING ;
DEFINE_SPECIAL_STACK_OF ( OPENSSL_STRING , char ) DEFINE_SPECIAL_STACK_OF_CONST ( OPENSSL_CSTRING , char ) typedef void * OPENSSL_BLOCK ;
DEFINE_SPECIAL_STACK_OF ( OPENSSL_BLOCK , void ) | 0False
|
Categorize the following code snippet as vulnerable or not. True or False | static gboolean should_skip_readdir_error ( CommonJob * common , GFile * dir ) {
if ( common -> skip_readdir_error != NULL ) {
return g_hash_table_lookup ( common -> skip_readdir_error , dir ) != NULL ;
}
return FALSE ;
} | 0False
|
End of preview. Expand
in Dataset Viewer.
README.md exists but content is empty.
Use the Edit dataset card button to edit it.
- Downloads last month
- 40