| code (string, lengths 46-36.6k) | language (string, 9 classes) | AST_depth (int64, 3-30) | alphanumeric_fraction (float64, 0.2-0.98) | max_line_length (int64, 13-399) | avg_line_length (float64, 5.08-140) | num_lines (int64, 7-299) | original_docstring (string, lengths 8-34.8k) | source (string, 2 classes) |
---|---|---|---|---|---|---|---|---|
function selectMap() {
$('#loadingText').hide();
$('#accountOverlay').hide();
$('#title').hide();
$('#selectMap').html("");
// 'data' is the JSON response string supplied by the surrounding AJAX success handler.
var maps = JSON.parse(data);
if (maps.length == 0) {
$('#selectMap').text('No maps could be loaded from the server. Please try again later.');
} else {
maps.forEach(function(currentVar) {
$('#selectMap').append('<a data-map-id="'+currentVar.MAPCODE +'" onclick="mapSelected(this);"><img alt="'+ currentVar.MAPNAME +'" src="maps/'+ currentVar.MAPCODE +'.jpg"><div class="text"><p class="title">'+ currentVar.MAPNAME +'</p><p class="description">'+ currentVar.MAPDESC +'</p></div></a>');
});
console.warn(data);
}
$('#selectMap').fadeIn(500);
// Fallback/error path (likely from the surrounding AJAX error handler):
console.error("AJAX error! Contact the website administrator or check your network connection.");
$('#selectMap').text('The request to the server encountered an error. Check your internet connection, or try again later.');
for (var key in tempMapdata) {
if (tempMapdata.hasOwnProperty(key)) {
$('#selectMap').append('<a data-map-id="'+tempMapdata[key].MAPCODE +'" onclick="mapSelected(this);"><img alt="'+ tempMapdata[key].MAPNAME +'" src="maps/'+ tempMapdata[key].MAPCODE +'.jpg"><div class="text"><p class="title">'+ tempMapdata[key].MAPNAME +'</p><p class="description">'+ tempMapdata[key].MAPDESC +'</p></div></div>');
}
}
$('#selectMap').fadeIn(500);
} | javascript | 25 | 0.633742 | 341 | 59.347826 | 23 | // Initialize 'select map' screen... | function |
def from_sql(row):
data = row.__dict__.copy()
data['id'] = row.id
data['user'] = row.user
data['timestamp'] = row.timestamp
data['query_text'] = row.query_text
data['original_file_url'] = row.original_file_url
data['bucket_file_url'] = row.bucket_file_url
data['file_type'] = row.file_type
data['json'] = row.json
data.pop('_sa_instance_state')
return data | python | 8 | 0.60396 | 53 | 32.75 | 12 | Translates a SQLAlchemy model instance into a dictionary | function |
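The helper above is normally called on a SQLAlchemy model instance fetched from a session. The `Query` model and the commented-out session calls below are illustrative assumptions, not part of the original snippet; only the column names mirror the attributes the function copies.

```python
# Hypothetical SQLAlchemy model matching the attributes accessed by from_sql().
from sqlalchemy import Column, DateTime, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Query(Base):
    __tablename__ = "queries"
    id = Column(Integer, primary_key=True)
    user = Column(String)
    timestamp = Column(DateTime)
    query_text = Column(String)
    original_file_url = Column(String)
    bucket_file_url = Column(String)
    file_type = Column(String)
    json = Column(String)

# row = session.query(Query).get(some_id)   # fetched elsewhere (assumed)
# payload = from_sql(row)                   # plain dict with no SQLAlchemy state
```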
@Test
public void testSetPostalCode() {
System.out.println("setPostalCode");
PostCode postalCode = new PostCode("4000", "010");
GeographicArea instance = new GeographicArea();
instance.setPostalCode(postalCode);
assertEquals(postalCode, instance.getPostalCode());
} | java | 8 | 0.667732 | 59 | 38.25 | 8 | /**
* Test of setPostalCode method, of class GeographicArea.
*/ | function |
public override void CopyTo(IModelElement[] array, int arrayIndex)
{
if ((this._parent.Status != null))
{
array[arrayIndex] = this._parent.Status;
arrayIndex = (arrayIndex + 1);
}
IEnumerator<IModelElement> erpJournalEntriesEnumerator = this._parent.ErpJournalEntries.GetEnumerator();
try
{
for (
; erpJournalEntriesEnumerator.MoveNext();
)
{
array[arrayIndex] = erpJournalEntriesEnumerator.Current;
arrayIndex = (arrayIndex + 1);
}
}
finally
{
erpJournalEntriesEnumerator.Dispose();
}
if ((this._parent.ErpReceivable != null))
{
array[arrayIndex] = this._parent.ErpReceivable;
arrayIndex = (arrayIndex + 1);
}
if ((this._parent.ErpInvoiceLineItem != null))
{
array[arrayIndex] = this._parent.ErpInvoiceLineItem;
arrayIndex = (arrayIndex + 1);
}
IEnumerator<IModelElement> erpPaymentsEnumerator = this._parent.ErpPayments.GetEnumerator();
try
{
for (
; erpPaymentsEnumerator.MoveNext();
)
{
array[arrayIndex] = erpPaymentsEnumerator.Current;
arrayIndex = (arrayIndex + 1);
}
}
finally
{
erpPaymentsEnumerator.Dispose();
}
} | c# | 13 | 0.411639 | 120 | 38.041667 | 48 | /// <summary>
/// Copies the contents of the collection to the given array starting from the given array index
/// </summary>
/// <param name="array">The array in which the elements should be copied</param>
/// <param name="arrayIndex">The starting index</param> | function |
def build(self, args):
if args['<devicename>'] == "all":
import glob
devices_list = set(glob.glob('devices/*')) - \
set(glob.glob('devices/*.ignore'))
else:
devices_list = [args['<devicename>'].encode('ascii', 'ignore')]
def go_build(filename):
print("INFO: building configuration for %s" % filename)
with open(filename, 'r') as f:
for line in f:
line = line.strip()
if line.split(' ')[0] == "vendor":
vendor = line.split(' ')[1]
elif line.split(' ')[0] == "transport":
transport = line.split(' ')[1]
if transport not in ['telnet', 'ssh']:
print("ERROR: unknown transport mechanism: %s" % transport)
sys.exit(2)
elif line.split(' ')[0] == "include":
polname = line.split(' ')[1]
print("")
print("")
print("Seed policy name: %s" % polname)
print(" IPv4:")
for line in generate_policy(polname, afi=4,
vendor=vendor).split('\n'):
print(" %s" % line)
print(" ---------")
print(" IPv6:")
for line in generate_policy(polname, afi=6,
vendor=vendor).split('\n'):
print(" %s" % line)
print("")
for device in devices_list:
go_build(device) | python | 19 | 0.373942 | 87 | 48.277778 | 36 |
Show unified build between last commit and current state.
Usage: aclhound [-d] [-j] build <devicename>
aclhound [-d] [-j] build all
Options:
-d --debug Enable debugging output
-j --jenkins Use jenkins environmental variables like WORKSPACE
Arguments:
<devicename>
The device file for which a network config must be generated.
<all>
Build all network policies into their respective vendor specific
representation. Useful as 'review' test in Jenkins.
Note: please ensure you run 'build' inside your ACLHound data directory
| function |
public List<ModulosDomainModel> GetModulosAll()
{
var modulos = repository.GetAll().Select(m => new ModulosDomainModel
{
Id = m.Id,
StrValor = m.StrValor
}).ToList();
return modulos;
} | c# | 18 | 0.485714 | 80 | 30.222222 | 9 | /// <summary>
/// This method queries all of the system's modules
/// </summary>
/// <returns>returns a list of modules</returns> | function |
class NativeMailbox final : public MessageMailbox {
public:
DRAKE_NO_COPY_NO_MOVE_NO_ASSIGN(NativeMailbox)
NativeMailbox(const std::string& channel_name, ::lcm::LCM* lcm) {
lcm->subscribe(channel_name, &NativeMailbox::Handle, this);
}
private:
void Handle(const ::lcm::ReceiveBuffer*, const std::string&,
const lcmt_drake_signal* message) {
DRAKE_DEMAND(message != nullptr);
SetMessage(*message);
}
} | c++ | 10 | 0.687215 | 67 | 32.769231 | 13 | // Subscribes to LCM without any DrakeLcmInterface sugar or mocks. | class |
def subtractMedian(data, time=None, width=100, dt=None):
if time is not None:
if dt is None:
x = data.xvals(0)
dt = x[1] - x[0]
width = time / dt
d1 = data.view(np.ndarray)
width = int(width)
med = scipy.ndimage.median_filter(d1, size=width)
d2 = d1 - med
if (hasattr(data, 'implements') and data.implements('MetaArray')):
return MetaArray(d2, info=data.infoCopy())
return d2 | python | 11 | 0.583149 | 70 | 33.769231 | 13 | Subtract rolling median from signal.
Arguments:
width: the width of the filter window in samples
time: the width of the filter window in x-axis units;
if specified, then width is ignored.
dt: the conversion factor for time -> width
| function |
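A minimal usage sketch for the rolling-median detrend above. The synthetic signal, sample spacing, and 50 ms window are assumptions for illustration; `subtractMedian` is the function defined in this row.

```python
import numpy as np

# Hypothetical signal: a 50 Hz sine riding on a slow drift, sampled every 0.1 ms.
dt = 1e-4
t = np.arange(0.0, 1.0, dt)
signal = np.sin(2 * np.pi * 50 * t) + 0.5 * t       # drift to be removed

# With `time` given, `width` is ignored and recomputed as time / dt (= 500 samples).
detrended = subtractMedian(signal, time=0.05, dt=dt)
print(detrended.shape)                              # (10000,)
```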
public static object Clone(object from)
{
var type = from.GetType();
var method = typeof(ModelTransform).GetMethods(BindingFlags.Public | BindingFlags.Static).First(m => m.Name == "New")
.MakeGenericMethod(type);
var to = method.Invoke(null, new object[] { from });
return to;
} | c# | 17 | 0.681208 | 120 | 36.375 | 8 | /// <summary>
/// Clones an object to a new one of the same Type, via Reflection, including
/// reflecting the Type.
/// </summary>
/// <param name="from"></param>
/// <returns></returns> | function |
public class QRadar {
String name = "";
String password = "";
String host_qradar = "";
String content = "Trigger the validation bot";
/**
* Constructor
*
* @param name
* @param password
* @param host_qradar
*/
public QRadar(String name, String password, String host_qradar) {
this.name = name;
this.password = password;
this.host_qradar = host_qradar;
}
ArrayList<String> new_ids = new ArrayList<String>(); // All the new offence id's are added here.
public void send_offences(ArrayList<String> ids)
throws ClientProtocolException, IOException, ParseException, InterruptedException {
String authString = name + ":" + password;
byte[] authEncBytes = Base64.encodeBase64(authString.getBytes());
String authStringEnc = new String(authEncBytes);
String authcode = "Basic" + " " + authStringEnc;
final SSLConnectionSocketFactory sslsf;
try {
sslsf = new SSLConnectionSocketFactory(SSLContext.getDefault(), NoopHostnameVerifier.INSTANCE);
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
final Registry<ConnectionSocketFactory> registry = RegistryBuilder.<ConnectionSocketFactory>create()
.register("http", new PlainConnectionSocketFactory()).register("https", sslsf).build();
final PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager(registry);
cm.setMaxTotal(100);
HttpClient httpclient = HttpClients.custom().setSSLSocketFactory(sslsf).setConnectionManager(cm).build();
String url = "https://" + host_qradar + "/api/siem/offenses";
HttpGet httpGet = new HttpGet(url);
httpGet.addHeader("Version", "7.0");
httpGet.addHeader("Authorization", authcode);
httpGet.addHeader("Accept", "application/json");
httpGet.addHeader("Content-Type", "application/json");
HttpResponse response = httpclient.execute(httpGet);
HttpEntity entity = response.getEntity();
String responseString = EntityUtils.toString(entity, "UTF-8");
JSONParser parser = new JSONParser();
JSONArray jsonArr = (JSONArray) parser.parse(responseString);
for (int i = 0; i < jsonArr.size(); i++) {
JSONObject jsonObj = (JSONObject) jsonArr.get(i);
String id = jsonObj.get("id").toString();
if (!new_ids.contains(id)) {
if (jsonObj.get("status").toString().equals("OPEN")) {
new_ids.add(id);
// You can add your own offence source below
System.out.print("New offence detected........");
TimeUnit.MILLISECONDS.sleep(100);
System.out.print("...");
TimeUnit.MILLISECONDS.sleep(100);
System.out.print("...");
TimeUnit.MILLISECONDS.sleep(100);
System.out.print("...");
TimeUnit.MILLISECONDS.sleep(100);
System.out.println("...");
TimeUnit.MILLISECONDS.sleep(1000);
System.out.println("Offence Name : "+jsonObj.get("offense_source").toString());
TimeUnit.MILLISECONDS.sleep(1000);
System.out.println("Running the validation bot, to check if the offence is valid");
String path_trigger = ""; // Update this with absolute path of trigger.txt file
Files.write(Paths.get(path_trigger), content.getBytes());
}
}
}
}
} | java | 19 | 0.69669 | 107 | 31.739583 | 96 | /**
* Wrapper class for QRadar.
*
* @author Rahul Reddy Ravipally
*
*/ | class |
public final class StaticIcmpV6TypePacketFactory
extends AbstractStaticPacketFactory<IcmpV6Type> {
private static final StaticIcmpV6TypePacketFactory INSTANCE
= new StaticIcmpV6TypePacketFactory();
private StaticIcmpV6TypePacketFactory() {
instantiaters.put(
IcmpV6Type.DESTINATION_UNREACHABLE, new PacketInstantiater() {
@Override
public Packet newInstance(
byte[] rawData, int offset, int length
) throws IllegalRawDataException {
return IcmpV6DestinationUnreachablePacket.newPacket(rawData, offset, length);
}
@Override
public Class<IcmpV6DestinationUnreachablePacket> getTargetClass() {
return IcmpV6DestinationUnreachablePacket.class;
}
}
);
instantiaters.put(
IcmpV6Type.PACKET_TOO_BIG, new PacketInstantiater() {
@Override
public Packet newInstance(
byte[] rawData, int offset, int length
) throws IllegalRawDataException {
return IcmpV6PacketTooBigPacket.newPacket(rawData, offset, length);
}
@Override
public Class<IcmpV6PacketTooBigPacket> getTargetClass() {
return IcmpV6PacketTooBigPacket.class;
}
}
);
instantiaters.put(
IcmpV6Type.TIME_EXCEEDED, new PacketInstantiater() {
@Override
public Packet newInstance(
byte[] rawData, int offset, int length
) throws IllegalRawDataException {
return IcmpV6TimeExceededPacket.newPacket(rawData, offset, length);
}
@Override
public Class<IcmpV6TimeExceededPacket> getTargetClass() {
return IcmpV6TimeExceededPacket.class;
}
}
);
instantiaters.put(
IcmpV6Type.PARAMETER_PROBLEM, new PacketInstantiater() {
@Override
public Packet newInstance(
byte[] rawData, int offset, int length
) throws IllegalRawDataException {
return IcmpV6ParameterProblemPacket.newPacket(rawData, offset, length);
}
@Override
public Class<IcmpV6ParameterProblemPacket> getTargetClass() {
return IcmpV6ParameterProblemPacket.class;
}
}
);
instantiaters.put(
IcmpV6Type.ECHO_REQUEST, new PacketInstantiater() {
@Override
public Packet newInstance(
byte[] rawData, int offset, int length
) throws IllegalRawDataException {
return IcmpV6EchoRequestPacket.newPacket(rawData, offset, length);
}
@Override
public Class<IcmpV6EchoRequestPacket> getTargetClass() {
return IcmpV6EchoRequestPacket.class;
}
}
);
instantiaters.put(
IcmpV6Type.ECHO_REPLY, new PacketInstantiater() {
@Override
public Packet newInstance(
byte[] rawData, int offset, int length
) throws IllegalRawDataException {
return IcmpV6EchoReplyPacket.newPacket(rawData, offset, length);
}
@Override
public Class<IcmpV6EchoReplyPacket> getTargetClass() {
return IcmpV6EchoReplyPacket.class;
}
}
);
instantiaters.put(
IcmpV6Type.ROUTER_SOLICITATION, new PacketInstantiater() {
@Override
public Packet newInstance(
byte[] rawData, int offset, int length
) throws IllegalRawDataException {
return IcmpV6RouterSolicitationPacket.newPacket(rawData, offset, length);
}
@Override
public Class<IcmpV6RouterSolicitationPacket> getTargetClass() {
return IcmpV6RouterSolicitationPacket.class;
}
}
);
instantiaters.put(
IcmpV6Type.ROUTER_ADVERTISEMENT, new PacketInstantiater() {
@Override
public Packet newInstance(
byte[] rawData, int offset, int length
) throws IllegalRawDataException {
return IcmpV6RouterAdvertisementPacket.newPacket(rawData, offset, length);
}
@Override
public Class<IcmpV6RouterAdvertisementPacket> getTargetClass() {
return IcmpV6RouterAdvertisementPacket.class;
}
}
);
instantiaters.put(
IcmpV6Type.NEIGHBOR_SOLICITATION, new PacketInstantiater() {
@Override
public Packet newInstance(
byte[] rawData, int offset, int length
) throws IllegalRawDataException {
return IcmpV6NeighborSolicitationPacket.newPacket(rawData, offset, length);
}
@Override
public Class<IcmpV6NeighborSolicitationPacket> getTargetClass() {
return IcmpV6NeighborSolicitationPacket.class;
}
}
);
instantiaters.put(
IcmpV6Type.NEIGHBOR_ADVERTISEMENT, new PacketInstantiater() {
@Override
public Packet newInstance(
byte[] rawData, int offset, int length
) throws IllegalRawDataException {
return IcmpV6NeighborAdvertisementPacket.newPacket(rawData, offset, length);
}
@Override
public Class<IcmpV6NeighborAdvertisementPacket> getTargetClass() {
return IcmpV6NeighborAdvertisementPacket.class;
}
}
);
instantiaters.put(
IcmpV6Type.REDIRECT, new PacketInstantiater() {
@Override
public Packet newInstance(
byte[] rawData, int offset, int length
) throws IllegalRawDataException {
return IcmpV6RedirectPacket.newPacket(rawData, offset, length);
}
@Override
public Class<IcmpV6RedirectPacket> getTargetClass() {
return IcmpV6RedirectPacket.class;
}
}
);
};
/**
*
* @return the singleton instance of StaticIcmpV6TypePacketFactory.
*/
public static StaticIcmpV6TypePacketFactory getInstance() {
return INSTANCE;
}
} | java | 15 | 0.650112 | 87 | 32.604651 | 172 | /**
* @author Kaito Yamada
* @since pcap4j 0.9.15
*/ | class |
def _get_logging_level(level_int):
if isinstance(level_int, bool):
level_int = int(level_int)
if level_int < 0:
return logging.CRITICAL + 1
elif level_int == 0:
return logging.WARNING
elif level_int == 1:
return logging.INFO
elif level_int == 2:
return logging.DEBUG
elif level_int in [10, 20, 30, 40, 50]:
return level_int
elif isinstance(level_int, int):
return level_int
else:
raise ValueError(f"logging level set to {level_int}, "
"but it must be an integer <= 2.") | python | 12 | 0.568792 | 62 | 32.166667 | 18 | Convert a logging level integer into a log level. | function |
public class BuildHistoryImpl implements BuildHistory, ControllableBuildHistory {
private final JenkinsJob associatedJob;
private final SortedList< JenkinsBuild > sortedBuilds;
private final ObservableList< JenkinsBuild > modifiableBuilds;
private final Map< Integer, JenkinsBuild > buildMap;
/**
* Constructs a new {@link BuildHistoryImpl}.
* @param job the {@link JenkinsJob} associated.
*/
public BuildHistoryImpl( JenkinsJob job ) {
if ( job == null ) {
throw new IllegalArgumentException( "Must supply non null job." );
}
this.associatedJob = job;
this.modifiableBuilds = new SynchronizedObservableList<>();
this.sortedBuilds = new SortedList<>(
modifiableBuilds,
( a, b ) -> Integer.compare( a.buildNumber(), b.buildNumber()
) );
this.buildMap = new TreeMap<>();
}//End Constructor
/**
* {@inheritDoc}
*/
@Override public JenkinsJob jenkinsJob() {
return associatedJob;
}//End Method
/**
* {@inheritDoc}
*/
@Override public ObservableList< JenkinsBuild > builds() {
return sortedBuilds;
}//End Method
/**
* {@inheritDoc}
*/
@Override public JenkinsBuild getHistoryFor( int buildNumber ) {
return buildMap.get( buildNumber );
}//End Method
/**
* {@inheritDoc}
*/
@Override public void addBuildHistory( JenkinsBuild build ) {
if ( modifiableBuilds.contains( build ) ) {
return;
}
if ( buildMap.containsKey( build.buildNumber() ) ) {
throw new IllegalArgumentException( "Build already present for " + build.buildNumber() );
}
updateBuilds( build );
}//End Method
/**
* Method to update the {@link JenkinsBuild} in the structures held by the {@link BuildHistory}.
* @param build the {@link JenkinsBuild} to add.
*/
private void updateBuilds( JenkinsBuild build ) {
buildMap.put( build.buildNumber(), build );
modifiableBuilds.add( build );
}//End Method
} | java | 14 | 0.628075 | 99 | 28.628571 | 70 | /**
* Implementation of the {@link BuildHistory} interface.
*/ | class |
public class ListAdminsCommand : Command
{
public ListAdminsCommand(CommandConfiguration config, ITranslationLookup translationLookup) : base(config, translationLookup)
{
Name = "admins";
Description = _translationLookup["COMMANDS_ADMINS_DESC"];
Alias = "a";
Permission = Permission.User;
RequiresTarget = false;
}
public static string OnlineAdmins(Server S, ITranslationLookup lookup)
{
var onlineAdmins = S.GetClientsAsList()
.Where(p => p.Level > Permission.Flagged)
.Where(p => !p.Masked)
.Select(p => $"[^3{Utilities.ConvertLevelToColor(p.Level, p.ClientPermission.Name)}^7] {p.Name}");
return onlineAdmins.Count() > 0 ?
string.Join(Environment.NewLine, onlineAdmins) :
lookup["COMMANDS_ADMINS_NONE"];
}
public override Task ExecuteAsync(GameEvent E)
{
foreach (string line in OnlineAdmins(E.Owner, _translationLookup).Split(Environment.NewLine))
{
var _ = E.Message.IsBroadcastCommand(_config.BroadcastCommandPrefix) ? E.Owner.Broadcast(line) : E.Origin.Tell(line);
}
return Task.CompletedTask;
}
} | c# | 20 | 0.589939 | 133 | 44.275862 | 29 | /// <summary>
/// Lists all unmasked admins
/// </summary> | class |
def start_process(process_handle):
is_win = environment.platform() == 'WINDOWS'
if is_win:
subprocess.list2cmdline_orig = subprocess.list2cmdline
subprocess.list2cmdline = lambda s: s[0]
try:
process_handle.run()
finally:
if is_win:
subprocess.list2cmdline = subprocess.list2cmdline_orig | python | 11 | 0.712934 | 60 | 30.8 | 10 | Start the process using process handle and override list2cmdline for
Windows. | function |
public static T InFeed<T>(
this T payload,
long? inlineCount = null,
string nextLink = null,
int elementsBefore = 0,
int elementsAfter = 0) where T : PayloadTestDescriptor
{
Debug.Assert(payload.PayloadElement.GetPayloadKindFromPayloadElement() == ODataPayloadKind.Resource, "only entries are supported.");
EntityInstance payloadEntity = (EntityInstance)payload.PayloadElement;
EntitySetInstance entitySetInstance = PayloadBuilder.EntitySet();
if (payload.PayloadEdmModel != null)
{
entitySetInstance.WithTypeAnnotation(ModelBuilder.GetPayloadElementEntityType(payloadEntity, payload.PayloadEdmModel));
}
entitySetInstance.AddRange(payloadEntity.GenerateSimilarEntries(elementsBefore));
entitySetInstance.Add(payload.PayloadElement);
entitySetInstance.AddRange(payloadEntity.GenerateSimilarEntries(elementsAfter));
entitySetInstance.InlineCount((int?)inlineCount).NextLink(nextLink);
bool hasInlineCountOrNextLink = inlineCount.HasValue || nextLink != null;
T feedPayloadDescriptor = (T)payload.Clone();
feedPayloadDescriptor.PayloadEdmModel = payload.PayloadEdmModel;
feedPayloadDescriptor.PayloadElement = entitySetInstance;
feedPayloadDescriptor.SkipTestConfiguration = tc =>
{
bool skip = payload.SkipTestConfiguration == null ? false : payload.SkipTestConfiguration(tc);
return skip || (hasInlineCountOrNextLink && tc.IsRequest);
};
return feedPayloadDescriptor;
} | c# | 16 | 0.666472 | 144 | 57.965517 | 29 | /// <summary>
/// Puts the specified <paramref name="payload"/> into a feed. It accepts optional parameters to control other
/// properties of the feed and the position of the <paramref name="payload"/> inside the feed.
/// </summary>
/// <param name="payload">The paylaod to be used inside of the feed. This must represent an entity instance payload.</param>
/// <param name="inlineCount">An optional inline count value for the feed.</param>
/// <param name="nextLink">An optional next-link value for the feed.</param>
/// <param name="elementsBefore">An optional number of entries that should exist before the <paramref name="payload"/> in the feed.</param>
/// <param name="elementsAfter">An optional number of entries that should exist after the <paramref name="payload"/> in the feed.</param>
/// <returns>A feed payload with the <paramref name="payload"/> as one of its entities.</returns> | function |
func (p *OnuTcontProfile) GetTcontDescription() string {
name := p.GenerateTcontName()
var descr = []string{
"Transmission Containers are responsible for negotiating customer services in a multiaccess architecture",
"Since GPON is Asymmetrical, where the Downlink frame is Broadcast/Unicast, T-Conts only control upstream traffic",
"There are some tricks to using T-Conts that will become more familiar throughout this exercise",
"T-Cont Types are a value from 1-5 identifying the handling of committed and burst rates",
"Type 1 allows setting a Fixed rate that is consumed whether or not the customer is using it, useful for TDM emulation",
"Type 2 allows setting as Assured rate that is not consumed when not in use, but has priority handling over best-effort services otherwise",
"Type 3 allows setting an Assured and Max rate, so a portion of the Max is given priority handling, and a portion is best-effort",
"Type 4 allows setting a Max rate for best-effort handling",
"Type 5 is a special T-Cont that allows setting Fixed, Assured and Maximum rates",
"The default T-Cont Type if not set is 5",
"T-Cont IDs are a value from 1-6 that allow stacking multiple T-Conts on the same ONU",
"T-Cont IDs cannot overlap on the same ONU",
"Best practices recommend a structure where the same 'types' of services are given the same IDs, as it is less likely for these to be applied to the same ONU",
"An example would be always specifying CWMP as ID 6, Internet data as type 2, VoIP as type 3, and IPTV as type 4",
"The default T-Cont ID if not set is 1",
"T-Conts can be flexibly named, however due to their function a standard naming convention is recommended",
}
for _, l := range descr {
fmt.Println(l)
}
name = "The Auto-Generated T-CONT name is:" + name
return name
} | go | 10 | 0.752212 | 161 | 68.576923 | 26 | // GetTcontDescription returns a string of helpful information about TConts, including parameters specific to the selected profile
| function |
public class CreateRuntimeImage {
private final Set<Path> modulePath;
private final List<String> modules;
private final Path outputDirectory;
private boolean ignoreSigningInformation;
private final String launcher;
private final Log log;
private final Integer compression;
private final boolean stripDebug;
private final boolean noHeaderFiles;
private final boolean noManPages;
private final List<String> excludeResourcesPatterns;
public CreateRuntimeImage(Set<Path> modulePath, List<String> modules, String launcherName, String launcherModule,
Path outputDirectory, Integer compression, boolean stripDebug, boolean ignoreSigningInformation, List<String> excludeResourcesPatterns, Log log, boolean noHeaderFiles, boolean noManPages) {
this.modulePath = ( modulePath != null ? modulePath : Collections.emptySet() );
this.modules = getModules( modules );
this.outputDirectory = outputDirectory;
this.ignoreSigningInformation = ignoreSigningInformation;
this.launcher = launcherName != null && launcherModule != null ? launcherName + "=" + launcherModule : null;
this.compression = compression;
this.stripDebug = stripDebug;
this.excludeResourcesPatterns = excludeResourcesPatterns;
this.log = log;
this.noHeaderFiles = noHeaderFiles;
this.noManPages = noManPages;
}
private static List<String> getModules(List<String> modules) {
if ( modules == null || modules.isEmpty() ) {
throw new IllegalArgumentException("At least one module must be added using the <modules> configuration property.");
}
return Collections.unmodifiableList( modules );
}
public void run() {
runJlink();
}
private void runJlink() throws AssertionError {
String javaHome = System.getProperty("java.home");
String jlinkBin = javaHome +
File.separator + "bin" +
File.separator + "jlink";
List<String> command = new ArrayList<>();
command.add( jlinkBin );
command.add( "--add-modules" );
command.add( String.join( ",", modules ) );
command.add( "--module-path" );
command.add( modulePath.stream()
.map( Path::toString )
.collect( Collectors.joining( File.pathSeparator ) )
);
command.add( "--output" );
command.add( outputDirectory.toString() );
if ( launcher != null ) {
command.add( "--launcher" );
command.add( launcher );
}
if ( compression != null ) {
command.add( "--compress" );
command.add( compression.toString() );
}
if ( stripDebug ) {
command.add( "--strip-debug" );
}
if (ignoreSigningInformation) {
command.add( "--ignore-signing-information" );
}
if ( !excludeResourcesPatterns.isEmpty() ) {
command.add( "--exclude-resources=" + String.join( ",", excludeResourcesPatterns ) );
}
if ( noHeaderFiles ) {
command.add( "--no-header-files" );
}
if ( noManPages ) {
command.add( "--no-man-pages" );
}
log.debug( "Running jlink: " + String.join( " ", command ) );
ProcessExecutor.run( "jlink", command, log );
}
} | java | 14 | 0.608569 | 219 | 35.126316 | 95 | /**
* Creates a modular runtime image for the given modules and module path, via jlink.
*
* @author Gunnar Morling
*/ | class |
public GameObject AddSplineNode( )
{
if( splineNodesArray.Count > 0 )
return AddSplineNode( splineNodesArray[splineNodesArray.Count-1] );
else
return AddSplineNode( null );
} | c# | 14 | 0.736559 | 70 | 25.714286 | 7 | /// <summary>
/// Use this function to quickly append a new SplineNode at the spline's end.
/// </summary>
/// <returns>
/// A new GameObject that has a SplineNode-Component attached to it.
/// </returns> | function |
public class IllegalFormatWidthException extends IllegalFormatException {
private static final long serialVersionUID = 16660902L;
private int w;
/**
* Constructs an instance of this class with the specified width.
*
* @param w
* The width
*/
public IllegalFormatWidthException(int w) {
this.w = w;
}
/**
* Returns the width
*
* @return The width
*/
public int getWidth() {
return w;
}
public String getMessage() {
return Integer.toString(w);
}
} | java | 8 | 0.581722 | 73 | 18.655172 | 29 | /**
* Unchecked exception thrown when the format width is a negative value other
* than <tt>-1</tt> or is otherwise unsupported.
*
* @since 1.5
*/ | class |
def _get_wmi_setting(wmi_class_name, setting, server):
ret = None
with salt.utils.winapi.Com():
try:
connection = wmi.WMI(namespace=_WMI_NAMESPACE)
wmi_class = getattr(connection, wmi_class_name)
objs = wmi_class([setting], Name=server)[0]
ret = getattr(objs, setting)
except wmi.x_wmi as error:
_LOG.error("Encountered WMI error: %s", error.com_error)
except (AttributeError, IndexError) as error:
_LOG.error("Error getting %s: %s", wmi_class_name, error)
return ret | python | 13 | 0.601786 | 69 | 45.75 | 12 |
Get the value of the setting for the provided class.
| function |
class InputFunction:
"""Callable input function that computes the risk set for each batch.
Parameters
----------
images : np.ndarray, shape=(n_samples, height, width)
Image data.
time : np.ndarray, shape=(n_samples,)
Observed time.
event : np.ndarray, shape=(n_samples,)
Event indicator.
batch_size : int, optional, default=64
Number of samples per batch.
drop_last : int, optional, default=False
Whether to drop the last incomplete batch.
shuffle : bool, optional, default=False
Whether to shuffle data.
seed : int, optional, default=89
Random number seed.
"""
def __init__(self,
images: np.ndarray,
time: np.ndarray,
event: np.ndarray,
batch_size: int = 10,
drop_last: bool = False,
shuffle: bool = False,
seed: int = 89) -> None:
if images.ndim == 3:
images = images[..., np.newaxis]
self.images = images
self.time = time
self.event = event
self.batch_size = batch_size
self.drop_last = drop_last
self.shuffle = shuffle
self.seed = seed
def size(self) -> int:
"""Total number of samples."""
return self.images.shape[0]
def steps_per_epoch(self) -> int:
"""Number of batches for one epoch."""
return int(np.floor(self.size() / self.batch_size))
def _get_data_batch(self, index: np.ndarray) -> Tuple[np.ndarray, Dict[str, np.ndarray]]:
"""Compute risk set for samples in batch."""
time = self.time[index]
event = self.event[index]
images = self.images[index]
labels = {
"label_event": event.astype(np.int32),
"label_time": time.astype(np.float32),
"label_riskset": _make_riskset(time)
}
return images, labels
def _iter_data(self) -> Iterable[Tuple[np.ndarray, Dict[str, np.ndarray]]]:
"""Generator that yields one batch at a time."""
index = np.arange(self.size())
rnd = np.random.RandomState(self.seed)
if self.shuffle:
rnd.shuffle(index)
for b in range(self.steps_per_epoch()):
start = b * self.batch_size
idx = index[start:(start + self.batch_size)]
yield self._get_data_batch(idx)
if not self.drop_last:
start = self.steps_per_epoch() * self.batch_size
idx = index[start:]
yield self._get_data_batch(idx)
def _get_shapes(self) -> Tuple[tf.TensorShape, Dict[str, tf.TensorShape]]:
"""Return shapes of data returned by `self._iter_data`."""
batch_size = self.batch_size if self.drop_last else None
h, w, c = self.images.shape[1:]
images = tf.TensorShape([batch_size, h, w, c])
labels = {k: tf.TensorShape((batch_size,))
for k in ("label_event", "label_time")}
labels["label_riskset"] = tf.TensorShape((batch_size, batch_size))
return images, labels
def _get_dtypes(self) -> Tuple[tf.DType, Dict[str, tf.DType]]:
"""Return dtypes of data returned by `self._iter_data`."""
labels = {"label_event": tf.int32,
"label_time": tf.float32,
"label_riskset": tf.bool}
return tf.float32, labels
def _make_dataset(self) -> tf.data.Dataset:
"""Create dataset from generator."""
ds = tf.data.Dataset.from_generator(
self._iter_data,
self._get_dtypes(),
self._get_shapes()
)
return ds
def __call__(self) -> tf.data.Dataset:
return self._make_dataset() | python | 15 | 0.555141 | 93 | 34.424528 | 106 | Callable input function that computes the risk set for each batch.
Parameters
----------
images : np.ndarray, shape=(n_samples, height, width)
Image data.
time : np.ndarray, shape=(n_samples,)
Observed time.
event : np.ndarray, shape=(n_samples,)
Event indicator.
batch_size : int, optional, default=64
Number of samples per batch.
drop_last : int, optional, default=False
Whether to drop the last incomplete batch.
shuffle : bool, optional, default=False
Whether to shuffle data.
seed : int, optional, default=89
Random number seed.
| class |
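A hedged usage sketch for the class above, assuming it (and the `_make_riskset` helper it calls) can be imported alongside TensorFlow 2.x running eagerly; the toy image and survival data are made up for illustration.

```python
import numpy as np

rng = np.random.RandomState(0)
images = rng.rand(100, 32, 32).astype(np.float32)           # toy grayscale "images"
time = rng.exponential(scale=10.0, size=100).astype(np.float32)
event = rng.binomial(1, 0.7, size=100).astype(np.int32)

train_fn = InputFunction(images, time, event,
                         batch_size=10, shuffle=True, drop_last=True, seed=42)

# The instance is callable, matching tf.estimator-style input_fn usage.
dataset = train_fn()
for batch_images, labels in dataset.take(1):
    print(batch_images.shape)                 # (10, 32, 32, 1)
    print(labels["label_riskset"].shape)      # (10, 10)
```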
class NicknameAttributes:
lastQuit: typing.Optional[str] = None
"""The nickname's last IRC quit message.\nRead Permission: Public"""
nick: str = ""
"""The nickname's last resolved hostname. \nRead Permission: Group"""
lastRealHost: typing.Optional[str] = None
"""The nickname's last IRC real name.\nRead Permission: Public"""
lastRealName: typing.Optional[str] = None
"""Timestamp for when the user was last seen on IRC\nRead Permission: Public"""
lastSeen: typing.Optional[datetime] = None
"""The nickname's last known (hashed) usermask\nRead Permission: Public"""
lastUserMask: typing.Optional[str] = None
"""The main nickname of the account this nickname belongs to\nRead Permission: Public"""
display: typing.Optional[str] = None
"""The IRC nickname\nRead Permission: Public"""
createdAt: typing.Optional[datetime] = None
"""Timestamp for when this nick was created\nRead Permission: Public"""
updatedAt: typing.Optional[datetime] = None
"""Timestamp for when this nick was last updated\nRead Permission: Public"""
vhost: typing.Optional[str] = None
"""The virtual-host of this nickname\nRead Permission: Public"""
email: typing.Optional[str] = None
"The email address this nickname belongs to Read Permission: Group"
score: typing.Optional[int] = None
"""closeness to searched nickname""" | python | 8 | 0.708423 | 92 | 54.6 | 25 | The nickname's last IRC quit message.\nRead Permission: Public | class |
def image_resize(self, image_path):
if self.resize_decision == True:
self.imagex = ImageTk.PhotoImage(file=image_path)
self.originalwidth, self.originalheight = self.imagex.width(), self.imagex.height()
if self.imagex.width() > 1000:
self.size_factor = floor(self.imagex.width() / 1000)
image = Image.open(self.image_path)
image = image.resize(
(self.imagex.width() // self.size_factor, self.imagex.height() // self.size_factor)
, Image.ANTIALIAS)
self.imagex = ImageTk.PhotoImage(image)
else:
self.size_factor = 1
pass
else:
self.imagex = ImageTk.PhotoImage(file=image_path)
self.originalwidth, self.originalheight = self.imagex.width(), self.imagex.height()
self.size_factor = 1
self.manipulatedwidth, self.manipulatedheight = self.imagex.width(), self.imagex.height() | python | 16 | 0.575937 | 103 | 52.421053 | 19 | gets the image_path and stores it in the imagex variable | function |
func outArgsClient(argPrefix string, errName string, data *goData, iface *compile.Interface, method *compile.Method) string {
first, args := "", method.OutArgs
if isStreamingMethod(method) {
first, args = "ocall "+uniqueName(iface, method, "ClientCall"), nil
}
return argParens(argNameTypes(argPrefix, first, "", errName+" error", data, args))
} | go | 11 | 0.732194 | 125 | 49.285714 | 7 | // outArgsClient returns the out args of an interface method on the client,
// wrapped in parens if necessary. The client side always returns a final
// error, in addition to the regular out-args. | function |
void EarlyScreen::putch(int attr, char c)
{
if (c == '\r') {
_current_x = 0;
} else if (c == '\n') {
_current_x = 0;
if (++_current_y == _height) {
_current_y = _height - 1;
scroll();
}
} else {
_video_ram[_current_x + (_current_y * _width)] = ((attr & 0xff) << 8) | c;
if (++_current_x == _width) {
_current_x = 0;
if (++_current_y == _height) {
_current_y = _height - 1;
scroll();
}
}
}
update_cursor();
} | c++ | 15 | 0.478842 | 76 | 19.454545 | 22 | /**
* Places a character on the screen at the current cursor position, and advances
* the cursor.
* @param attr The attributes for the character (e.g. colour information)
* @param c The character to display.
*/ | function |
static int
expr_reaches_here_p (struct occr *occr, struct expr *expr, basic_block bb,
int check_self_loop)
{
int rval;
char *visited = xcalloc (last_basic_block, 1);
rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
free (visited);
return rval;
} | c | 7 | 0.678201 | 77 | 28 | 10 | /* This wrapper for expr_reaches_here_p_work() is to ensure that any
memory allocated for that function is returned. */ | function |
@FXML
public void newBook(Event e){
String[] emptyBookValues = new String[44];
Book currentBook = new Book(emptyBookValues);
currentBookIndex = -1;
initializeEditor(currentBook);
} | java | 7 | 0.736842 | 47 | 26.285714 | 7 | // Book is not being edited; open the editor with a blank book | function |
public class CachedExpressionCompiler : IExpressionEvaluator {
static ConcurrentDictionary<LambdaExpression, ParameterListDelegate> delegates = new ConcurrentDictionary<LambdaExpression, ParameterListDelegate>(new ExpressionComparing.StructuralComparer(ignoreConstantsValues: true));
public static readonly CachedExpressionCompiler Instance = new CachedExpressionCompiler();
private CachedExpressionCompiler() { }
VariadicArrayParametersDelegate IExpressionEvaluator.EvaluateLambda(LambdaExpression lambdaExpression) => CachedCompileLambda(lambdaExpression);
public VariadicArrayParametersDelegate CachedCompileLambda(LambdaExpression lambda) {
IReadOnlyList<object> constants;
ParameterListDelegate compiled;
if (delegates.TryGetValue(lambda, out compiled)) {
constants = ConstantExtractor.ExtractConstantsOnly(lambda.Body);
} else {
var extractionResult = ConstantExtractor.ExtractConstants(lambda.Body);
compiled = ParameterListRewriter.RewriteLambda(
Expression.Lambda(
extractionResult.ConstantfreeExpression.Body,
extractionResult.ConstantfreeExpression.Parameters.Concat(lambda.Parameters)))
.Compile();
delegates.TryAdd(lambda, compiled);
constants = extractionResult.ExtractedConstants;
}
return args => compiled(constants.Concat(args).ToArray());
}
object IExpressionEvaluator.Evaluate(Expression unparametrizedExpression) => CachedCompile(unparametrizedExpression);
public object CachedCompile(Expression unparametrizedExpression) => CachedCompileLambda(Expression.Lambda(unparametrizedExpression))();
DELEGATE IExpressionEvaluator.EvaluateTypedLambda<DELEGATE>(Expression<DELEGATE> expression) => CachedCompileTypedLambda(expression);
public DELEGATE CachedCompileTypedLambda<DELEGATE>(Expression<DELEGATE> expression) where DELEGATE : class => CachedCompileLambda(expression).WrapDelegate<DELEGATE>();
} | c# | 21 | 0.82446 | 222 | 69.296296 | 27 | /// <summary>
/// A cache for expression compilation. For compilations for structural identical <see cref="ExpressionComparing"/> expressions
/// this speeds up the compilations significantly (but slows the executions slightly) and avoids the memory leak created when
/// calling <see cref="LambdaExpression.Compile"/> repeatedly.
/// </summary>
/// <remarks>
/// The result from <see cref="LambdaExpression"/> compilation usually cannot be cached when it contains captured variables
/// or constants (closures) that should be replaced in later calls. This cache first extracts all constants from the expression
/// and will then look up the normalized (constant free) expression in a compile cache. The constants then get re-inserted into
/// the result via a closure and a delegate capturing the actual parameters of the original expression is returned.
/// </remarks> | class |
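The caching idea described above, sketched in Python as a rough analog (this is not the library's API): compile a constant-free expression template once, cache the resulting code object, and re-bind the extracted constants on each call.

```python
import functools

@functools.lru_cache(maxsize=None)
def _compile_template(template_source):
    # One compilation per structurally identical expression template.
    return compile(template_source, "<expr>", "eval")

def cached_eval(template_source, **constants):
    # Different constant values reuse the same cached code object.
    return eval(_compile_template(template_source), {"__builtins__": {}}, constants)

print(cached_eval("a + b * 2", a=1, b=3))   # 7  (compiles the template)
print(cached_eval("a + b * 2", a=5, b=4))   # 13 (cache hit, no recompilation)
```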
public abstract class AbstractFutureUtils {
private static final Logger log = LoggerFactory.getLogger(AbstractFutureUtils.class);
private static final AnyToVoidFunction ANY_TO_VOID_FUNCTION = new AnyToVoidFunction();
public abstract @NotNull ListenableFuture<Void> voidFutureFromAnyFuture(@NotNull ListenableFuture<?> anyFuture);
public @NotNull ListenableFuture<Void> voidFutureFromAnyFuture(
final @NotNull ListenableFuture<?> anyFuture, final @NotNull Executor executor) {
return Futures.transform(anyFuture, ANY_TO_VOID_FUNCTION, executor);
}
public @NotNull ListenableFuture<Void> mergeVoidFutures(
final @NotNull ListenableFuture<Void> future1, final @NotNull ListenableFuture<Void> future2) {
return voidFutureFromList(ImmutableList.of(future1, future2));
}
public <T> @NotNull ListenableFuture<Void> voidFutureFromList(final @NotNull ImmutableList<ListenableFuture<T>> futureList) {
if (futureList.isEmpty()) {
return Futures.immediateFuture(null);
}
final SettableFuture<Void> resultFuture = SettableFuture.create();
final FutureCallback<T> futureCallback = new VoidFutureCombiningCallback<>(futureList.size(), resultFuture);
for (final ListenableFuture<T> future : futureList) {
Futures.addCallback(future, futureCallback, MoreExecutors.directExecutor());
}
return resultFuture;
}
public void addExceptionLogger(final @NotNull ListenableFuture<?> listenableFuture) {
Futures.addCallback(listenableFuture, new FutureCallback<Object>() {
@Override
public void onSuccess(final @Nullable Object o) {
//no op
}
@Override
public void onFailure(final @NotNull Throwable throwable) {
log.error("Uncaught exception", throwable);
}
}, MoreExecutors.directExecutor());
}
public <T> void addPersistenceCallback(
final @NotNull ListenableFuture<T> future,
final @NotNull FutureCallback<? super T> callback,
final @NotNull ExecutorService executorService) {
if (!executorService.isShutdown()) {
Futures.addCallback(future, callback, executorService);
}
}
public abstract <T> void addPersistenceCallback(
@NotNull ListenableFuture<T> future, @NotNull FutureCallback<? super T> callback);
public abstract <E, C extends Collection<Set<E>>> @NotNull ListenableFuture<Set<E>> combineSetResults(
@NotNull ListenableFuture<C> collectionFuture);
public <E, C extends Collection<Set<E>>> @NotNull ListenableFuture<Set<E>> combineSetResults(
final @NotNull ListenableFuture<C> collectionFuture, final @NotNull Executor executor) {
final SettableFuture<Set<E>> resultFuture = SettableFuture.create();
Futures.addCallback(collectionFuture, new FutureCallback<C>() {
@Override
public void onSuccess(final @Nullable C result) {
if (result == null) {
resultFuture.set(null);
return;
}
final Set<E> resultSet = new HashSet<>();
for (final Set<E> set : result) {
resultSet.addAll(set);
}
resultFuture.set(resultSet);
}
@Override
public void onFailure(final @NotNull Throwable t) {
resultFuture.setException(t);
}
}, executor);
return resultFuture;
}
private static class AnyToVoidFunction implements Function<Object, Void> {
@Override
public Void apply(final @Nullable Object input) {
return null;
}
}
} | java | 17 | 0.641682 | 129 | 38.895833 | 96 | /**
* @author Lukas Brandl
*/ | class |
class HyperVReplicaBaseReplicationDetails extends models['ReplicationProviderSpecificSettings'] {
/**
* Create a HyperVReplicaBaseReplicationDetails.
* @member {date} [lastReplicatedTime] The Last replication time.
* @member {array} [vmNics] The PE Network details.
* @member {string} [vmId] The virtual machine Id.
* @member {string} [vmProtectionState] The protection state for the vm.
* @member {string} [vmProtectionStateDescription] The protection state
* description for the vm.
* @member {object} [initialReplicationDetails] Initial replication details.
* @member {string} [initialReplicationDetails.initialReplicationType]
* Initial replication type.
* @member {string}
* [initialReplicationDetails.initialReplicationProgressPercentage] The
* initial replication progress percentage.
* @member {array} [vMDiskDetails] VM disk details.
*/
constructor() {
super();
}
/**
* Defines the metadata of HyperVReplicaBaseReplicationDetails
*
* @returns {object} metadata of HyperVReplicaBaseReplicationDetails
*
*/
mapper() {
return {
required: false,
serializedName: 'HyperVReplicaBaseReplicationDetails',
type: {
name: 'Composite',
className: 'HyperVReplicaBaseReplicationDetails',
modelProperties: {
instanceType: {
required: true,
serializedName: 'instanceType',
type: {
name: 'String'
}
},
lastReplicatedTime: {
required: false,
serializedName: 'lastReplicatedTime',
type: {
name: 'DateTime'
}
},
vmNics: {
required: false,
serializedName: 'vmNics',
type: {
name: 'Sequence',
element: {
required: false,
serializedName: 'VMNicDetailsElementType',
type: {
name: 'Composite',
className: 'VMNicDetails'
}
}
}
},
vmId: {
required: false,
serializedName: 'vmId',
type: {
name: 'String'
}
},
vmProtectionState: {
required: false,
serializedName: 'vmProtectionState',
type: {
name: 'String'
}
},
vmProtectionStateDescription: {
required: false,
serializedName: 'vmProtectionStateDescription',
type: {
name: 'String'
}
},
initialReplicationDetails: {
required: false,
serializedName: 'initialReplicationDetails',
type: {
name: 'Composite',
className: 'InitialReplicationDetails'
}
},
vMDiskDetails: {
required: false,
serializedName: 'vMDiskDetails',
type: {
name: 'Sequence',
element: {
required: false,
serializedName: 'DiskDetailsElementType',
type: {
name: 'Composite',
className: 'DiskDetails'
}
}
}
}
}
}
};
}
} | javascript | 21 | 0.516451 | 97 | 29.132743 | 113 | /**
* Hyper V replica provider specific settings base class.
*
* @extends models['ReplicationProviderSpecificSettings']
*/ | class |
int
ipw2200_init(struct ipw2200_softc *sc)
{
int err;
if (!(sc->sc_flags & IPW2200_FLAG_FW_CACHED)) {
IPW2200_WARN((sc->sc_dip, CE_WARN,
"ipw2200_init(): no firmware is available\n"));
return (DDI_FAILURE);
}
ipw2200_stop(sc);
err = ipw2200_chip_reset(sc);
if (err != DDI_SUCCESS) {
IPW2200_WARN((sc->sc_dip, CE_WARN,
"ipw2200_init(): could not reset adapter\n"));
goto fail;
}
err = ipw2200_load_fw(sc, sc->sc_fw.boot_base, sc->sc_fw.boot_size);
if (err != DDI_SUCCESS) {
IPW2200_WARN((sc->sc_dip, CE_WARN,
"ipw2200_init(): could not load boot code\n"));
goto fail;
}
err = ipw2200_load_uc(sc, sc->sc_fw.uc_base, sc->sc_fw.uc_size);
if (err != DDI_SUCCESS) {
IPW2200_WARN((sc->sc_dip, CE_WARN,
"ipw2200_init(): could not load microcode\n"));
goto fail;
}
ipw2200_master_stop(sc);
ipw2200_ring_hwsetup(sc);
err = ipw2200_load_fw(sc, sc->sc_fw.fw_base, sc->sc_fw.fw_size);
if (err != DDI_SUCCESS) {
IPW2200_WARN((sc->sc_dip, CE_WARN,
"ipw2200_init(): could not load firmware\n"));
goto fail;
}
sc->sc_flags |= IPW2200_FLAG_FW_INITED;
err = ipw2200_config(sc);
if (err != DDI_SUCCESS) {
IPW2200_WARN((sc->sc_dip, CE_WARN,
"ipw2200_init(): device configuration failed\n"));
goto fail;
}
delay(drv_usectohz(delay_config_stable));
return (DDI_SUCCESS);
fail:
ipw2200_stop(sc);
return (err);
} | c | 12 | 0.631273 | 69 | 27.081633 | 49 | /*
* If init failed, it will call stop internally. Therefore, it's unnecessary
* to call ipw2200_stop() when this subroutine is failed. Otherwise, it may
* be called twice.
*/ | function |
public static string ToRelativePath(Uri fullUri, Uri folderUri)
{
Uri relativeUri = folderUri.MakeRelativeUri(fullUri);
string relativePath = relativeUri.ToString();
relativePath = Uri.UnescapeDataString(relativePath);
return relativePath.Replace('/', '\\');
} | c# | 11 | 0.746377 | 63 | 38.571429 | 7 | /// <summary>
/// Converts an absolute path to the file or a folder to the path that is relative to the specified folder.
/// </summary>
/// <param name="fullUri"> Path to make into relative one.</param>
/// <param name="folderUri">Path to the folder to relative to which the result will be.</param>
/// <returns>A path that leads to the file or folder, relative to the specified folder.</returns> | function |
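For comparison, the same relative-path conversion can be sketched in Python with `os.path.relpath`; the example paths are assumptions and this analog is not part of the original C# snippet.

```python
import os

full_path = "/projects/app/assets/logo.png"       # assumed example paths
folder = "/projects/app"

print(os.path.relpath(full_path, start=folder))   # -> assets/logo.png (POSIX separators)
```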
async def here_comes_a_new_challenge(
self, url: Union[str, URL] = None,
) -> None:
if not url:
chosen_uf = random.choice(self.uf_codes)
url = BASE_URL.format(
uf_gazette_code=GAZETTE_CODES[chosen_uf], day="",
)
loop = asyncio.get_running_loop()
async with self.lock:
if self.cfscraper:
del self.cfscraper
self.cfscraper = None
self.cfscraper = await loop.run_in_executor(
None, cfscrape.create_scraper
)
get = partial(self.cfscraper.get, url=url,)
response = await loop.run_in_executor(None, get) | python | 11 | 0.528139 | 65 | 37.555556 | 18 | Uses cfscrape package to tentatively solve
Javascript challenges from Cloudflare. | function |
public <T> T read(VFile vf, Class<T> resultCls) throws IOException
{
try
{
return resultCls.cast(toWebView(vf));
}
catch (ClassCastException ex)
{
return null;
}
} | java | 10 | 0.489712 | 66 | 21.181818 | 11 | /**
* Reads the given web view class from the given file.
*
* @param <T> The web view type.
* @param vf The file to read from.
* @param resultCls The web view class.
* @return The new read object.
* @throws IOException If any IO error occurs.
*/ | function |
class DNSFilterEngine
{
public:
enum class PolicyKind { NoAction, Drop, NXDOMAIN, NODATA, Truncate, Custom};
struct Policy
{
Policy(): d_kind(PolicyKind::NoAction), d_custom(nullptr), d_name(nullptr), d_ttl(0)
{
}
bool operator==(const Policy& rhs) const
{
return d_kind == rhs.d_kind;
}
DNSRecord getCustomRecord(const DNSName& qname) const;
PolicyKind d_kind;
std::shared_ptr<DNSRecordContent> d_custom;
std::shared_ptr<std::string> d_name;
int d_ttl;
};
DNSFilterEngine();
void clear();
void clear(size_t zone);
void reserve(size_t zone, size_t entriesCount) {
assureZones(zone);
d_zones[zone].qpolName.reserve(entriesCount);
}
void addClientTrigger(const Netmask& nm, Policy pol, size_t zone);
void addQNameTrigger(const DNSName& nm, Policy pol, size_t zone);
void addNSTrigger(const DNSName& dn, Policy pol, size_t zone);
void addNSIPTrigger(const Netmask& nm, Policy pol, size_t zone);
void addResponseTrigger(const Netmask& nm, Policy pol, size_t zone);
bool rmClientTrigger(const Netmask& nm, Policy pol, size_t zone);
bool rmQNameTrigger(const DNSName& nm, Policy pol, size_t zone);
bool rmNSTrigger(const DNSName& dn, Policy pol, size_t zone);
bool rmNSIPTrigger(const Netmask& nm, Policy pol, size_t zone);
bool rmResponseTrigger(const Netmask& nm, Policy pol, size_t zone);
Policy getQueryPolicy(const DNSName& qname, const ComboAddress& nm, const std::unordered_map<std::string,bool>& discardedPolicies) const;
Policy getProcessingPolicy(const DNSName& qname, const std::unordered_map<std::string,bool>& discardedPolicies) const;
Policy getProcessingPolicy(const ComboAddress& address, const std::unordered_map<std::string,bool>& discardedPolicies) const;
Policy getPostPolicy(const vector<DNSRecord>& records, const std::unordered_map<std::string,bool>& discardedPolicies) const;
size_t size() {
return d_zones.size();
}
void setPolicyName(size_t zoneIdx, std::string name)
{
assureZones(zoneIdx);
d_zones[zoneIdx].name = std::make_shared<std::string>(name);
}
private:
void assureZones(size_t zone);
struct Zone {
std::unordered_map<DNSName, Policy> qpolName;
NetmaskTree<Policy> qpolAddr;
std::unordered_map<DNSName, Policy> propolName;
NetmaskTree<Policy> propolNSAddr;
NetmaskTree<Policy> postpolAddr;
std::shared_ptr<std::string> name;
};
vector<Zone> d_zones;
} | c++ | 13 | 0.710633 | 139 | 40.083333 | 60 | /* This class implements a filtering policy that is able to fully implement RPZ, but is not bound to it.
In other words, it is generic enough to support RPZ, but could get its data from other places.
We know the following actions:
No action - just pass it on
Drop - drop a query, no response
NXDOMAIN - fake up an NXDOMAIN for the query
NODATA - just return no data for this qtype
Truncate - set TC bit
Modified - "we fake an answer for you"
These actions can be caused by the following triggers:
qname - the query name
client-ip - the IP address of the requestor
response-ip - an IP address in the response
ns-name - the name of a server used in the delegation
ns-ip - the IP address of a server used in the delegation
This means we get several hook points:
1) when the query comes in: qname & client-ip
2) during processing: ns-name & ns-ip
3) after processing: response-ip
Triggers meanwhile can apply to:
Verbatim domain names
Wildcard versions (*.domain.com does NOT match domain.com)
Netmasks (IPv4 and IPv6)
Finally, triggers are grouped in different zones. The "first" zone that has a match
is consulted. Then within that zone, rules again have precedences.
*/ | class |
final class Wonder<T extends Entity> {
private enum Record {
MOST_CONFUSED("Most Confused"),
MOST_WITHERED("Most Withered");
private final String loreLine;
private Record(String loreLine) {
this.loreLine = ChatColor.GOLD + loreLine + ": ";
}
private String getLoreLine() {
return this.loreLine;
}
}
private static final Random RANDOM = new Random();
private static final List<Wonder<? extends Entity>> WONDERS = new ArrayList<>();
private static Set<Player> getNearbyPlayers(Entity entity, int range) {
return entity.getNearbyEntities(range, range, range).stream().filter(e -> e instanceof Player).map(e -> (Player) e).collect(Collectors.toSet());
}
private static ParticleTimer particles() {
return JavaPlugin.getPlugin(WonderBow.class).getParticleTimer();
}
private static void record(Record record, ItemStack stack, int score) {
ItemMeta meta = stack.getItemMeta();
List<String> lore = meta.getLore();
boolean set = false;
final String loreLine = record.getLoreLine();
for (int i = 0; i < lore.size(); i++) {
String string = lore.get(i);
if (string.startsWith(loreLine)) {
int count = Integer.parseInt(string.substring((loreLine).length()));
if (count < score) {
lore.set(i, loreLine + score);
}
set = true;
break;
}
}
if (!set) {
lore.add(loreLine + score);
}
meta.setLore(lore);
stack.setItemMeta(meta);
}
private static final Wonder<Chicken> CHICKEN = new Wonder<>(Chicken.class, 1, chicken -> particles().addEffect(ParticleTimer.Particle.ANGRY_VILLAGER, chicken, 10, 5, bawk -> {
for (int i = 0; i < 15; i++) {
bawk.getWorld().strikeLightning(bawk.getLocation().clone().add(RANDOM.nextDouble() * 4 + 2, RANDOM.nextDouble() * 4 + 2, RANDOM.nextDouble() * 4 + 2));
}
bawk.remove();
}), no());
private static final Wonder<EnderPearl> ENDERS = new Wonder<>(EnderPearl.class, 1, ender -> particles().addEffect(ParticleTimer.Particle.HEART, ender, -1, 1, no()), no()); // BAD LUCK EH
private static final Wonder<WitherSkull> SKULL = new Wonder<>(WitherSkull.class, 1, no(), skull -> {
Set<Player> nearbyPlayers = getNearbyPlayers(skull, 3);
if (!nearbyPlayers.isEmpty() && skull.hasMetadata("WonderShooter")) {
record(Record.MOST_WITHERED, (ItemStack) skull.getMetadata("WonderShooter").get(0).value(), nearbyPlayers.size());
}
nearbyPlayers.forEach(player -> player.addPotionEffect(new PotionEffect(PotionEffectType.WITHER, 200, 1, true)));
});
private static final Wonder<Fireball> FIRE = new Wonder<>(Fireball.class, 1, no(), fireball -> particles().addEffect(ParticleTimer.Particle.SPLASH, fireball.getWorld().spawn(fireball.getLocation(), Cow.class), 40, 1, cow -> {
cow.getWorld().createExplosion(cow.getLocation().getX(), cow.getLocation().getY(), cow.getLocation().getZ(), 3, false, false);
cow.getLocation().add(0, 1, 0).getBlock().setType(Material.WATER);
cow.remove();
}));
private static final Wonder<Arrow> ARROW = new Wonder<>(Arrow.class, 2, arrow -> particles().addEffect(ParticleTimer.Particle.SPELL, arrow, -1, 1, no()), arrow -> {
Set<Player> nearbyPlayers = getNearbyPlayers(arrow, 5);
if (!nearbyPlayers.isEmpty() && arrow.hasMetadata("WonderShooter")) {
record(Record.MOST_CONFUSED, (ItemStack) arrow.getMetadata("WonderShooter").get(0).value(), nearbyPlayers.size());
}
nearbyPlayers.forEach(player -> player.addPotionEffect(new PotionEffect(PotionEffectType.CONFUSION, 200, 4, true)));
arrow.remove();
});
private static final Wonder<Egg> EGG = new Wonder<>(Egg.class, 1, egg -> particles().addEffect(ParticleTimer.Particle.RED, egg, -1, 1, no()), egg -> {
Set<Chicken> chicks = new HashSet<>();
for (int i = 0; i < 30; i++) {
Chicken chick = egg.getWorld().spawn(egg.getLocation(), Chicken.class);
chicks.add(chick);
chick.setVelocity(new Vector(RANDOM.nextFloat() * 0.4, RANDOM.nextFloat() * 1.5, RANDOM.nextFloat() * 0.4));
}
Bukkit.getScheduler().scheduleSyncDelayedTask(JavaPlugin.getPlugin(WonderBow.class), () -> chicks.stream().filter(Entity::isValid).forEach(chicken -> {
chicken.remove();
particles().broadcastEffect(ParticleTimer.Particle.LAVA.toString(), chicken.getLocation(), 0, 20);
}), 40);
});
/**
* Generates a consumer which does nothing.
*
* @param <T> Type to win!
* @return consumer of nothing
*/
static <T> Consumer<T> no() {
return t -> {
};
}
/**
* Gets a random Wonder!
*
* @return a wonder
*/
public static Wonder getWonder() {
return WONDERS.get(RANDOM.nextInt(WONDERS.size()));
}
private final Consumer<T> processHit;
private final Consumer<T> processSpawn;
private final Class<T> entityClass;
private final int weight;
private Wonder(Class<T> entityClass, int weight) {
this(entityClass, weight, no(), no());
}
private Wonder(Class<T> entityClass, int weight, Consumer<T> processSpawn, Consumer<T> processHit) {
this.processHit = processHit;
this.processSpawn = processSpawn;
this.entityClass = entityClass;
this.weight = weight;
for (int i = 0; i < weight; i++) {
WONDERS.add(this);
}
}
/**
* Gets the class to be spawned for this Wonder.
*
* @return spawning class
*/
public Class<T> getEntityClass() {
return this.entityClass;
}
/**
* Process a wonder-spawned entity being hit.
*
* @param entity entity
*/
void onHit(T entity) {
this.processHit.accept(entity);
}
/**
* Process a wonder-spawned entity being spawned.
*
* @param entity spawned entity
*/
void onSpawn(T entity) {
this.processSpawn.accept(entity);
}
/**
* Gets the weight of this Wonder.
*
* @return weight
*/
public int getWeight() {
return this.weight;
}
} | java | 23 | 0.603774 | 229 | 37.872727 | 165 | /**
* Wonders are the magical tricks the WonderBow can do!
*
* @param <T> Entity type of this particular Wonder
*/ | class |
def stub_if_missing_deps(*deps):
def _find_failed_imports():
failed = []
for dep in deps:
parts = dep.split(':')
modname = parts[0]
attrname = parts[1] if len(parts)>1 else None
try:
__import__(modname)
except ImportError as err:
failed.append(str(err).split()[-1])
continue
if attrname and not hasattr(sys.modules[modname], attrname):
failed.append('.'.join([modname, attrname]))
return failed
def _stub_if_missing(obj):
failed = _find_failed_imports()
if failed:
if isclass(obj):
def _error(obj, *args, **kwargs):
msg = "The %s class depends on the following modules or attributes which were not found on your system: %s"
raise RuntimeError(msg % (obj.__name__, failed))
obj.__new__ = staticmethod(_error)
elif isfunction(obj):
body = "raise RuntimeError(\"The %s function depends on the following modules or attributes which were not found on your system: %s\")"
return replace_funct(obj, body % (obj.__name__, failed))
return obj
return _stub_if_missing | python | 18 | 0.534109 | 152 | 45.107143 | 28 | A class decorator that will try to import the specified modules and in
the event of failure will stub out the class, raising a RuntimeError that
explains the missing dependencies whenever an attempt is made to
instantiate the class.
deps: str args
args in deps may have the form a.b.c or a.b.c:attr, where attr would be
searched for within the module a.b.c after a.b.c is successfully imported.
| function |
pub async fn fetch_optional<'e, E>(self, executor: E) -> crate::Result<Option<F::Output>>
where
E: RefExecutor<'e, Database = DB>,
'q: 'e,
{
// could be implemented in terms of `fetch()` but this avoids overhead from `try_stream!`
let mut cursor = executor.fetch_by_ref(self.query);
let mut mapper = self.mapper;
let val = cursor.next().await?;
val.map(|row| mapper.try_map_row(row)).transpose()
} | rust | 10 | 0.593548 | 97 | 41.363636 | 11 | /// Get the first row in the result | function |
void ts::TablesDisplay::displaySectionData(const Section& section, const UString& margin, uint16_t cas)
{
cas = _duck.casId(cas);
DisplaySectionFunction handler = PSIRepository::Instance()->getSectionDisplay(section.tableId(), _duck.standards(), section.sourcePID(), cas);
if (handler != nullptr) {
PSIBuffer buf(_duck, section.payload(), section.payloadSize());
handler(*this, section, buf, margin);
displayExtraData(buf, margin);
}
else {
displayUnkownSectionData(section, margin);
}
} | c++ | 10 | 0.679558 | 146 | 40.846154 | 13 | //----------------------------------------------------------------------------
// Display a section on the output stream.
//---------------------------------------------------------------------------- | function |
HueLight::HueLight(String name, String room, String hueId, byte *server, String userid)
: Device(name, room)
{
_hueId = hueId;
memcpy(_server, server, 4);
_userID = userid;
_value = 0;
_type = 'L';
} | c++ | 6 | 0.592105 | 87 | 24.444444 | 9 | /**
* Constructor
 * @param name String name of the light
*/ | function |
static uint8_t HIDUSB_PacketIsCommand(void)
{
size_t i;
for (i = 0; i < sizeof (Command) - 1; i++) {
if (PageData[i] != Command[i]) {
return 0xff;
}
}
for (i++; i < COMMAND_SIZE; i++) {
if (PageData[i]) {
return 0xff;
}
}
return PageData[sizeof (Command) - 1];
} | c | 9 | 0.559859 | 45 | 18 | 15 | /**
 * @brief Check if a USB HID packet contains a bootloader command.
*
 * @return the bootloader command index, or 0xff if no command was found.
*/ | function |
public void processIntent(Intent intent, String callingPackageName,
boolean canCallNonEmergency) {
if (!isVoiceCapable()) {
return;
}
String action = intent.getAction();
if (Intent.ACTION_CALL.equals(action) ||
Intent.ACTION_CALL_PRIVILEGED.equals(action) ||
Intent.ACTION_CALL_EMERGENCY.equals(action)) {
processOutgoingCallIntent(intent, callingPackageName, canCallNonEmergency);
}
} | java | 9 | 0.617706 | 87 | 40.5 | 12 | /**
* Processes intents sent to the activity.
*
* @param intent The intent.
*/ | function |
public bool AddNewSceneObject(SceneObjectGroup sceneObject, bool attachToBackup, bool sendClientUpdates)
{
if (m_sceneGraph.AddNewSceneObject(sceneObject, attachToBackup, sendClientUpdates))
{
EventManager.TriggerObjectAddedToScene(sceneObject);
return true;
}
return false;
} | c# | 10 | 0.629032 | 104 | 40.444444 | 9 | /// <summary>
/// Add a newly created object to the scene
/// </summary>
/// <param name="sceneObject"></param>
/// <param name="attachToBackup">
/// If true, the object is made persistent into the scene.
/// If false, the object will not persist over server restarts
/// </param>
/// <param name="sendClientUpdates">
/// If true, updates for the new scene object are sent to all viewers in range.
/// If false, it is left to the caller to schedule the update
/// </param> | function |
public static double Basis1(int startDateVal, int endDateVal)
{
SimpleDate startDate = CreateDate(startDateVal);
SimpleDate endDate = CreateDate(endDateVal);
double yearLength;
if (IsGreaterThanOneYear(startDate, endDate))
{
yearLength = AverageYearLength(startDate.year, endDate.year);
}
else if (ShouldCountFeb29(startDate, endDate))
{
yearLength = DAYS_PER_LEAP_YEAR;
}
else
{
yearLength = DAYS_PER_NORMAL_YEAR;
}
return DateDiff(startDate.ticks, endDate.ticks) / yearLength;
} | c# | 12 | 0.541429 | 77 | 35.894737 | 19 | /// <summary>
/// Basis 1, Actual/Actual date convention
/// </summary>
/// <param name="startDateVal">The start date value assumed to be less than or equal to endDateVal.</param>
/// <param name="endDateVal">The end date value assumed to be greater than or equal to startDateVal.</param>
/// <returns></returns> | function |
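To make the day-count branches above concrete, a small worked example (illustrative, not taken from the row): for 2021-01-01 to 2021-07-01 the span is under a year and contains no Feb 29, so the result is the 181 actual days divided by 365 ≈ 0.4959; had the span included 2020-02-29, the denominator would be 366 per ShouldCountFeb29, and spans longer than one year divide by the average length of the years involved.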
void Capstone2LlvmIrTranslatorMips_impl::translateLui(cs_insn* i, cs_mips* mi, llvm::IRBuilder<>& irb)
{
assert(mi->op_count == 2);
op1 = loadOp(mi->operands[1], irb);
op1 = irb.CreateZExt(op1, getDefaultType());
op1 = irb.CreateShl(op1, llvm::ConstantInt::get(op1->getType(), 16));
storeOp(mi->operands[0], op1, irb);
} | c++ | 11 | 0.689231 | 102 | 39.75 | 8 | /**
* MIPS_INS_LUI
 * This behaves like the 32-bit MIPS instruction even on 64-bit MIPS.
*/ | function |
public class Solution {
public static void main(String... args) throws InterruptedException {
execute(1); // foobar
execute(2); // foobarfoobar
}
private static void execute(int n) throws InterruptedException {
FooBar obj = new FooBar(n);
Thread foo = new Thread(() -> {
try {
obj.foo(() -> System.out.print("foo"));
} catch(InterruptedException e) {
e.printStackTrace();
}
});
Thread bar = new Thread(() -> {
try {
obj.bar(() -> System.out.print("bar"));
} catch(InterruptedException e) {
e.printStackTrace();
}
});
foo.start();
bar.start();
foo.join();
bar.join();
System.out.println();
}
} | java | 20 | 0.469484 | 73 | 22.694444 | 36 | /**
* Suppose you are given the following code:
* <pre>
* class FooBar {
* public void foo() {
* for (int i = 0; i < n; i++) {
* print("foo");
* }
* }
*
* public void bar() {
* for (int i = 0; i < n; i++) {
* print("bar");
* }
* }
* }
* </pre>
* The same instance of <tt>FooBar</tt> will be passed to two different <tt>threads</tt>. Thread <tt>A</tt> will call <tt>foo()</tt> while thread
 * <tt>B</tt> will call <tt>bar()</tt>. Modify the given program to output <tt>"foobar"</tt> <tt>n</tt> times.
* <p>
* <b>Example 1:</b>
* <pre>
* Input: n = 1
* Output: "foobar"
* Explanation: There are two threads being fired asynchronously. One of them calls foo(), while the other calls bar(). "foobar" is being output 1
* time.
* </pre>
 * <b>Example 2:</b>
* <pre>
* Input: n = 2
* Output: "foobarfoobar"
* Explanation: "foobar" is being output 2 times.
* </pre>
*
* @author Oleg Cherednik
* @since 07.04.2020
*/ | class |
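A minimal sketch of one possible FooBar implementation assumed by the driver above, using java.util.concurrent.Semaphore to force the two threads to alternate; this is an illustrative assumption, not the solution stored with this row.

import java.util.concurrent.Semaphore;

// Hypothetical FooBar assumed by the Solution driver above: the "foo" permit
// starts available and the "bar" permit starts empty, so the two threads
// strictly alternate foo -> bar exactly n times.
class FooBar {
    private final int n;
    private final Semaphore fooTurn = new Semaphore(1);
    private final Semaphore barTurn = new Semaphore(0);

    public FooBar(int n) {
        this.n = n;
    }

    public void foo(Runnable printFoo) throws InterruptedException {
        for (int i = 0; i < n; i++) {
            fooTurn.acquire();   // wait for our turn
            printFoo.run();      // prints "foo"
            barTurn.release();   // hand the turn to bar()
        }
    }

    public void bar(Runnable printBar) throws InterruptedException {
        for (int i = 0; i < n; i++) {
            barTurn.acquire();   // wait until foo() has printed
            printBar.run();      // prints "bar"
            fooTurn.release();   // allow the next foo()
        }
    }
}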
protected COABO makeChildCoaboOf(COABO parentCoa, Short accountId, String accountName, String glCode) {
COAHierarchyEntity parentCoah = parentCoa.getCoaHierarchy();
if (parentCoah == null) {
throw new RuntimeException("ParentCoa.coaHierarchy has not been defined");
}
COABO childCoa = new COABO(accountId, accountName, new GLCodeEntity(accountId, glCode));
COAHierarchyEntity hierarchy = new COAHierarchyEntity(childCoa, parentCoa.getCoaHierarchy());
childCoa.setCoaHierarchy(hierarchy);
return childCoa;
} | java | 9 | 0.716263 | 103 | 56.9 | 10 | /**
* Establish child-parent relationship between two COABO instances.
*
* <p>
* ASSUMPTION: the parentCoa's hierarchy has been created. In other words, build the hierarchy from top down.
*
* @throws RuntimeException if parentCoa has no associated COAHierarchy.
*/ | function |
fn on_black_hole_detected<CC: CongestionController>(
&mut self,
now: Timestamp,
congestion_controller: &mut CC,
) {
self.black_hole_counter = Default::default();
self.largest_acked_mtu_sized_packet = None;
// Reset the plpmtu back to the BASE_PLPMTU and notify the congestion controller
self.plpmtu = BASE_PLPMTU;
congestion_controller.on_mtu_update(BASE_PLPMTU);
// Cancel any current probes
self.state = State::SearchComplete;
// Arm the PMTU raise timer to try a larger MTU again after a cooling off period
self.arm_pmtu_raise_timer(now + BLACK_HOLE_COOL_OFF_DURATION);
} | rust | 7 | 0.650442 | 88 | 44.266667 | 15 | /// Called when an excessive number of packets larger than the BASE_PLPMTU have been lost | function |
public void LateUpdate()
{
if (!m_disabled && _bLastHandVisible != _bHandVisible)
{
OnHandVisibleChange(_bHandVisible);
}
} | c# | 10 | 0.471204 | 66 | 26.428571 | 7 | /// <summary>
/// Use LateUpdate to check for whether or not the hand is up
/// </summary> | function |
private static String assertsType( Schema<?> schema)
{
return
schema.getType() == null?
"any" :
!"string".equals( schema.getType())?
schema.getType() :
"binary".equals( schema.getFormat())?
"binary" :
"byte".equals( schema.getFormat())?
"byte" :
"string";
} | java | 12 | 0.5387 | 52 | 23.923077 | 13 | /**
* Returns a description of the type of the given schema.
*/ | function |
func GetCertificate(clientset clientset.Interface, cfgPath string) error {
cfg, err := loadConfig(cfgPath)
if err != nil {
return errors.Wrapf(err, "Failed to load config from path: %v", cfgPath)
}
if isKeyPairValid(certPath, keyPath) {
log.Info("Key pair already exists and is valid")
return nil
}
log.Infof("Continuing to generate key pair...")
keyData, new, err := keyutil.LoadOrGenerateKeyFile(keyPath)
if err != nil {
return errors.Wrapf(err, "Failed to get the private key from: %s", keyPath)
}
if new {
log.Infof("The private key was generated in: %s", keyPath)
}
privateKey, err := keyutil.ParsePrivateKeyPEM(keyData)
if err != nil {
return errors.Wrapf(err, "Failed to parse the private key from: %s", keyPath)
}
err = removeCSR(clientset, cfg.CSR.Name)
if err != nil {
return errors.Wrap(err, "Failed to remove old CSR")
}
csrPEM, err := cert.MakeCSR(privateKey, &cfg.CSR.Subject, cfg.CSR.DNSSANs, cfg.CSR.IPSANs)
if err != nil {
return errors.Wrap(err, "Failed to create CSR")
}
reqName, reqUID, err := csr.RequestCertificate(clientset, csrPEM, cfg.CSR.Name, cfg.Signer, cfg.CSR.KeyUsages,
privateKey)
if err != nil {
return errors.Wrap(err, "CSR Request failed")
}
ctx, cancel := context.WithTimeout(context.Background(), cfg.WaitTimeout.Duration)
defer cancel()
certPEM, err := csr.WaitForCertificate(ctx, clientset, reqName, reqUID)
if err != nil {
return errors.Wrap(err, "Waiting for certifcate failed")
}
err = cert.WriteCert(certPath, certPEM)
if err != nil {
return errors.Wrapf(err, "Failed to write the certificate to: %s", certPath)
}
logger.Info("CSR successfully signed")
return nil
} | go | 10 | 0.710511 | 111 | 34.446809 | 47 | // GetCertificate creates a CSR that needs to be approved and signed by a specific signer.
// The certificate and private key are then dumped to certPath and keyPath respectively. | function |
@Override
void parse(VoltXMLElement stmtNode) {
assert(stmtNode.children.isEmpty());
assert(m_tableList.isEmpty());
addTabletoList(stmtNode, "thetable");
addTabletoList(stmtNode, "othertable");
} | java | 7 | 0.651064 | 47 | 32.714286 | 7 | /**
* Parse the arguments to a SWAP TABLE statement.
* SWAP TABLE statements use simple String attributes
* as the "VoltXML" representation for their target tables.
* They don't need all the complexity of the child table nodes
* used in other kinds of statements.
* SWAP TABLE statements don't bother to populate most of the
* detailed AbstractParsedTable members related to tables.
* The m_tableList is sufficient for SWAP TABLE's minimal
* validation and planning requirements.
*/ | function |
internal class MemoryCache<TKey, TValue>
where TValue : class
{
[NotNull]
private readonly Hashtable _cache = new Hashtable();
[NotNull]
[ContractAnnotation("key:null => halt;valueFactory:null => halt")]
public TValue GetOrCreate([NotNull] TKey key, [NotNull, InstantHandle] Func<TValue> valueFactory)
{
Debug.Assert(key != null);
Debug.Assert(valueFactory != null);
var cachedValue = (TValue)_cache[key];
if (cachedValue != null)
return cachedValue;
lock (_cache)
{
cachedValue = (TValue)_cache[key];
if (cachedValue != null)
return cachedValue;
cachedValue = valueFactory();
_cache[key] = cachedValue;
return cachedValue;
}
}
} | c# | 13 | 0.518313 | 105 | 35.08 | 25 | /// <summary>
/// Represents a type that implements a memory cache.
/// </summary>
/// <typeparam name="TKey">Cache key type.</typeparam>
/// <typeparam name="TValue">Cache value type.</typeparam> | class |
private int FindInfections()
{
int infections = 0;
for (var p = 0; p < _graph.NumPeople(); p++)
{
if (_graph.GetPerson(p).IsInfected())
{
infections++;
}
}
return infections;
} | c# | 12 | 0.359375 | 56 | 25.75 | 12 | /**
* <summary>
* Search through the entire graph to find all infected people
* </summary>
*
* <returns>The number of people currently infected</returns>
*/ | function |
public abstract class LoginService<TCredentials, TPrincipal> : ILoginService<TCredentials>
where TPrincipal : class, IPrincipal
{
private string authenticationCookieName = FormsAuthentication.FormsCookieName;
private int authenticationCookieTimeout = FormsAuthentication.Timeout.Minutes;
public int AuthenticationCookieTimeout
{
get
{
return this.authenticationCookieTimeout;
}
protected set
{
this.authenticationCookieTimeout = value;
}
}
public string AuthenticationCookieName
{
get
{
return this.authenticationCookieName;
}
protected set
{
this.authenticationCookieName = value;
}
}
public bool Login(TCredentials credentials)
{
return this.Login(credentials, false);
}
public bool Login(TCredentials credentials, bool persistLogin)
{
if (this.IsRequestAuthenticated())
{
throw new InvalidOperationException(
"Current request is already authenticated, meaning that the user is already logged in. " +
"Login cannot be performed while the previous session is still active. Please logout," +
" before trying to login again.");
}
bool credentialsAreValid = this.ValidateCredentials(credentials);
if (credentialsAreValid)
{
var userData = this.GetPrincipal(credentials);
var userDataSerialized = JsonConvert.SerializeObject(userData);
var authenticationTicket = new FormsAuthenticationTicket(
2,
userData.Identity.Name,
DateTime.Now,
DateTime.Now.AddMinutes(this.AuthenticationCookieTimeout),
persistLogin,
userDataSerialized);
string encryptedTicket = FormsAuthentication.Encrypt(authenticationTicket);
HttpCookie authCookie = new HttpCookie(this.AuthenticationCookieName, encryptedTicket);
HttpContext.Current.Response.Cookies.Add(authCookie);
return true;
}
return false;
}
public void Logout()
{
var cookie = HttpContext.Current.Response.Cookies[this.authenticationCookieName];
if (cookie != null)
{
cookie.Expires = DateTime.UtcNow.AddYears(-10);
}
}
public bool IsRequestAuthenticated()
{
var user = this.TryGetPrincipal(HttpContext.Current);
return user != null;
}
public void AuthenticateRequest()
{
var user = HttpContext.Current.User = this.TryGetPrincipal(HttpContext.Current);
if (user == null)
{
this.Logout();
}
}
public abstract bool ValidateCredentials(TCredentials credentials);
private TPrincipal TryGetPrincipal(HttpContext context)
{
HttpCookie authCookie = context.Request.Cookies[this.AuthenticationCookieName];
if (authCookie == null || string.IsNullOrWhiteSpace(authCookie.Value))
{
return null;
}
try
{
FormsAuthenticationTicket authTicket = FormsAuthentication.Decrypt(authCookie.Value);
if (authTicket == null)
{
return null;
}
return JsonConvert.DeserializeObject<TPrincipal>(authTicket.UserData);
}
catch (CryptographicException)
{
return null;
}
}
public TPrincipal GetPrincipal()
{
if (!this.IsRequestAuthenticated())
{
return null;
}
return HttpContext.Current.User as TPrincipal;
}
protected abstract TPrincipal GetPrincipal(TCredentials credentials);
} | c# | 19 | 0.726652 | 95 | 27.785714 | 112 | /// <summary>
/// Abstract class providing simple authentication and login mechanism. For authenticated
/// users, the class will allow storing additional data in the session state.
/// </summary>
/// <typeparam name="TCredentials">Type of the credentials.</typeparam>
/// <typeparam name="TPrincipal">Type of the session data.</typeparam> | class |
void trpgMemWriteBuffer::End()
{
if (lengths.size() == 0)
return;
int id = lengths.size()-1;
int32 len = curLen - lengths[id];
int32 rlen = len-sizeof(int32);
if (ness != cpuNess)
rlen = trpg_byteswap_int(rlen);
set(curLen - len,sizeof(int32),(const char *)&rlen);
lengths.resize(id);
} | c++ | 9 | 0.590909 | 56 | 26.583333 | 12 | /* End()
Finished defining an object.
Write the length out where appropriate.
*/ | function |
static struct isl_basic_set *basic_set_append_equalities(
struct isl_basic_set *bset, struct isl_mat *eq)
{
int i, k;
unsigned len;
if (!bset || !eq)
goto error;
bset = isl_basic_set_extend_space(bset, isl_space_copy(bset->dim), 0,
eq->n_row, 0);
if (!bset)
goto error;
len = 1 + isl_space_dim(bset->dim, isl_dim_all) + bset->extra;
for (i = 0; i < eq->n_row; ++i) {
k = isl_basic_set_alloc_equality(bset);
if (k < 0)
goto error;
isl_seq_cpy(bset->eq[k], eq->row[i], eq->n_col);
isl_seq_clr(bset->eq[k] + eq->n_col, len - eq->n_col);
}
isl_mat_free(eq);
bset = isl_basic_set_gauss(bset, NULL);
bset = isl_basic_set_finalize(bset);
return bset;
error:
isl_mat_free(eq);
isl_basic_set_free(bset);
return NULL;
} | c | 11 | 0.622819 | 70 | 25.642857 | 28 | /* Intersect the basic set "bset" with the affine space specified by the
* equalities in "eq".
*/ | function |
func addSidecar(pw *k8testing.PodWrapper) *k8testing.PodWrapper {
pw.Spec.Containers = append(pw.Spec.Containers, v1.Container{
Name: sdkServerSidecarName,
ImagePullPolicy: v1.PullIfNotPresent,
VolumeMounts: []v1.VolumeMount{
{
Name: fmt.Sprintf("%s-%s", defaultServiceAccountName, "token-2kx9e"),
ReadOnly: true,
MountPath: mountPath,
},
},
})
return pw
} | go | 20 | 0.681592 | 78 | 27.785714 | 14 | // addSidecar add side car to test pod | function |
def merge():
paients_merged.remove()
for p in paients_splited_with_excel.find():
nu = p['nu']
merged = []
nu.sort(key=lambda x:x['d']+x['t']+str(x['en'])+str(x['wt']))
for n in nu:
if merged != [] and _is_same_nu(merged[-1], n):
merged[-1]['v'] += n['v']
else:
merged.append(n)
p['nu'] = merged
paients_merged.insert_one(p) | python | 16 | 0.452874 | 69 | 32.538462 | 13 |
merge long and term advice by date
| function |
static int kfd_parse_subtype_cu(struct crat_subtype_computeunit *cu,
struct list_head *device_list)
{
struct kfd_topology_device *dev;
pr_debug("Found CU entry in CRAT table with proximity_domain=%d caps=%x\n",
cu->proximity_domain, cu->hsa_capability);
list_for_each_entry(dev, device_list, list) {
if (cu->proximity_domain == dev->proximity_domain) {
if (cu->flags & CRAT_CU_FLAGS_CPU_PRESENT)
kfd_populated_cu_info_cpu(dev, cu);
if (cu->flags & CRAT_CU_FLAGS_GPU_PRESENT)
kfd_populated_cu_info_gpu(dev, cu);
break;
}
}
return 0;
} | c | 10 | 0.685512 | 76 | 32.352941 | 17 | /* kfd_parse_subtype_cu - parse compute unit subtypes and attach it to correct
* topology device present in the device_list
*/ | function |
static bool Writer(void* context, const void* buffer, uint32_t buffer_size)
{
dmArray<uint8_t>* out = (dmArray<uint8_t>*) context;
if (out->Remaining() < buffer_size) {
int r = out->Remaining();
int offset = dmMath::Max((int) buffer_size - r, 32 * 1024);
out->OffsetCapacity(offset);
}
out->PushArray((const uint8_t*) buffer, buffer_size);
return true;
} | c++ | 12 | 0.559361 | 75 | 38.909091 | 11 | /*# Zlib compression API documentation
*
* Functions for compression and decompression of string buffers.
*
* @document
* @name Zlib
* @namespace zlib
*/ | function |
def angle_conversion(angle, units):
angle_f = tf.cast(angle, "float32")
if units.lower() == "degrees":
angle = 3.14 * angle_f / 180.0
elif units.lower() in ["radians", "rads", "rad"]:
angle = angle_f
    return angle | python | 9 | 0.577869 | 53 | 34 | 7 | Convert the angle to the desired format
Args:
angle (list): list of random angle values
units (str): conversion unit
Returns:
        list: angle values converted to the desired form
| function |
func Post(path string, body string) (responseBody string, status int, header http.Header) {
response := do("POST", path, strings.NewReader(body))
responseBody = readResponseBody(response)
status = response.StatusCode
header = response.Header
return
} | go | 9 | 0.764706 | 91 | 35.571429 | 7 | // Post runs HTTP POST method | function |
public final class DummyRecordWithChildren extends RecordContainer
{
private byte[] _header;
private long _type;
/**
* Create a new holder for a boring record with children
*/
protected DummyRecordWithChildren(byte[] source, int start, int len) {
// Just grab the header, not the whole contents
_header = Arrays.copyOfRange(source, start, start+8);
_type = LittleEndian.getUShort(_header,2);
// Find our children
_children = Record.findChildRecords(source,start+8,len-8);
}
/**
* Return the value we were given at creation
*/
public long getRecordType() { return _type; }
/**
* Write the contents of the record back, so it can be written
* to disk
*/
public void writeOut(OutputStream out) throws IOException {
writeOut(_header[0],_header[1],_type,_children,out);
}
} | java | 10 | 0.636872 | 74 | 28.866667 | 30 | /**
* If we come across a record we know has children of (potential)
* interest, but where the record itself is boring, we create one
* of these. It allows us to get at the children, but not much else
*/ | class |
def create_tarball(self):
tarball_path = ybutils.get_release_file(self.repository,
self.release_name,
self.build_type)
ybutils.log_message(logging.INFO, "Exporting release tarball")
prefix_dir = ""
if self.release_name == "yugabyte":
prefix_dir = "{}-{}".format(
self.release_name, ybutils.get_default_release_version(self.repository))
with tarfile.open(tarball_path, "w:gz") as tar:
for folder_key in self.release_manifest:
if folder_key.startswith("/"):
raise YBOpsRuntimeError("Manifest file keys should not start with /")
for file_pattern in self.release_manifest[folder_key]:
if not file_pattern.startswith("/"):
file_pattern = os.path.join(self.repository, file_pattern)
for file_path in glob.glob(file_pattern):
path_within_tarball = os.path.join(
prefix_dir, folder_key, os.path.basename(file_path))
tar.add(file_path, arcname=path_within_tarball)
ybutils.log_message(logging.INFO, "Generated tarball: {}".format(tarball_path))
return tarball_path | python | 18 | 0.544436 | 89 | 59.909091 | 22 | This method creates a tar file based on the release manifest.
Returns:
(str): returns the tar file
| function |
function addTableCell(tagName, align, content)
{
var startPos = pos,
endPos = startPos + content.length,
ignoreLen;
pos = endPos;
var m = /^( *).*?( *)$/.exec(content);
if (m[1])
{
ignoreLen = m[1].length;
createIgnoreTag(startPos, ignoreLen);
startPos += ignoreLen;
}
if (m[2])
{
ignoreLen = m[2].length;
createIgnoreTag(endPos - ignoreLen, ignoreLen);
endPos -= ignoreLen;
}
createCellTags(tagName, startPos, endPos, align);
} | javascript | 9 | 0.650655 | 50 | 20.857143 | 21 | /**
* Add a cell's tags for current table at current position
*
* @param {string} tagName Either TD or TH
* @param {string} align Either "left", "center", "right" or ""
* @param {string} content Cell's text content
*/ | function |
public class LRUCacheMap<K, V> extends AbstractCacheMap<K, V> {
private final Queue<CachedValue> queue = new ConcurrentLinkedQueue<CachedValue>();
public LRUCacheMap(int size, long timeToLiveInMillis, long maxIdleInMillis) {
super(size, timeToLiveInMillis, maxIdleInMillis);
}
@Override
protected void onValueCreate(CachedValue value) {
queue.add(value);
}
@Override
protected void onValueRemove(CachedValue value) {
queue.remove(value);
}
@Override
protected void onValueRead(CachedValue value) {
// move value to tail of queue
if (queue.remove(value)) {
queue.add(value);
}
}
@Override
protected void onMapFull() {
CachedValue value = queue.poll();
if (value != null) {
map.remove(value.getKey(), value);
}
}
@Override
public void clear() {
queue.clear();
super.clear();
}
} | java | 12 | 0.596954 | 86 | 23.04878 | 41 | /**
* LRU (least recently used) cache.
*
* @author Nikita Koksharov
*
* @param <K>
* @param <V>
*/ | class |
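For illustration only — a short usage sketch. It assumes the AbstractCacheMap base class (not shown in this row) exposes the standard Map-style put/get/containsKey operations and that 0 disables the time-to-live and max-idle limits; the names and constructor arguments are hypothetical.

// Hypothetical usage: capacity 2, TTL and max-idle assumed disabled via 0.
LRUCacheMap<String, String> cache = new LRUCacheMap<>(2, 0, 0);
cache.put("a", "1");
cache.put("b", "2");
cache.get("a");      // onValueRead() moves "a" to the tail of the LRU queue
cache.put("c", "3"); // map is full, so onMapFull() evicts the head entry: "b"
System.out.println(cache.containsKey("b")); // expected: false
System.out.println(cache.get("a"));         // expected: "1"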
public static void main(String[] args) throws Exception {
String configFile = (args.length ==0 ? MONITOR_CONFIG_FILE : args[0]);
JMXGemFireXDMonitor monitor = new JMXGemFireXDMonitor(configFile);
monitor.connectToAgent();
if (monitor.runMode.equalsIgnoreCase("once")) {
TIMER.schedule(monitor, 0);
} else {
TIMER.schedule(monitor, 0, monitor.pollInterval);
if (monitor.runDuration !=0) {
Thread.sleep(monitor.runDuration);
TIMER.cancel();
}
}
} | java | 11 | 0.702306 | 72 | 33.142857 | 14 | //private final Map<ObjectName, Map<String, GemFireXDStats>> systemStats; | function |
protected static class OpenedClassRemapper extends ClassRemapper {
/**
* Creates a new opened class remapper.
*
* @param classVisitor The class visitor to wrap
* @param remapper The remapper to apply.
*/
protected OpenedClassRemapper(ClassVisitor classVisitor, Remapper remapper) {
super(OpenedClassReader.ASM_API, classVisitor, remapper);
}
} | java | 8 | 0.495479 | 97 | 45.166667 | 12 | /**
* A {@link ClassRemapper} that uses the Byte Buddy-defined API version.
*/ | class |
func WKT(geo tegola.Geometry) string {
switch g := geo.(type) {
default:
return ""
case tegola.Point:
if g == nil {
return "POINT EMPTY"
}
return "POINT (" + wkt(g) + ")"
case tegola.Point3:
if g == nil {
return "POINT M EMPTY"
}
return "POINT M (" + wkt(g) + ")"
case tegola.MultiPoint:
if g == nil {
return "MULTIPOINT EMPTY"
}
return "MULTIPOINT " + wkt(g)
case tegola.LineString:
if g == nil {
return "LINESTRING EMPTY"
}
return "LINESTRING " + wkt(g)
case tegola.MultiLine:
if g == nil {
return "MULTILINE EMPTY"
}
return "MULTILINE " + wkt(g)
case tegola.Polygon:
if g == nil {
return "POLYGON EMPTY"
}
return "POLYGON " + wkt(g)
case tegola.MultiPolygon:
if g == nil {
return "MULTIPOLYGON EMPTY"
}
return "MULTIPOLYGON " + wkt(g)
case tegola.Collection:
if g == nil {
return "GEOMETRYCOLLECTION EMPTY"
}
var geometries []string
for _, sg := range g.Geometries() {
s := WKT(sg)
geometries = append(geometries, s)
}
return "GEOMETRYCOLLECTION (" + strings.Join(geometries, ",") + ")"
}
} | go | 12 | 0.611009 | 69 | 20.392157 | 51 | //WKT returns a WKT representation of the Geometry if possible.
// An empty string is returned if the geometry type is unknown. | function |
protected static PadMode convertEdgeMode(Element filterElement, BridgeContext ctx) {
String s = filterElement.getAttributeNS(null, SVG_EDGE_MODE_ATTRIBUTE);
if (s.length() == 0) {
return PadMode.REPLICATE;
}
if (SVG_DUPLICATE_VALUE.equals(s)) {
return PadMode.REPLICATE;
}
if (SVG_WRAP_VALUE.equals(s)) {
return PadMode.WRAP;
}
if (SVG_NONE_VALUE.equals(s)) {
return PadMode.ZERO_PAD;
}
throw new BridgeException(ctx, filterElement, ERR_ATTRIBUTE_VALUE_MALFORMED,
new Object[] { SVG_EDGE_MODE_ATTRIBUTE, s });
} | java | 8 | 0.702555 | 84 | 31.294118 | 17 | /**
* Convert the 'edgeMode' attribute of the specified feConvolveMatrix filter
* primitive element.
*
* @param filterElement the feConvolveMatrix filter primitive element
* @param ctx the BridgeContext to use for error information
*/ | function |
public T ask() {
String reply;
while (true) {
EnumSet<T> options = EnumSet.allOf(enumClass);
callback.say(format("[?] %s", question));
callback.say(format(" Options: %s",
options.stream().map(Enum::toString).map(String::toLowerCase).collect(joining(", "))));
reply = callback.ask();
reply = reply.trim().toLowerCase();
if (reply.isEmpty()) {
callback.say("[!] Hmm ... Looks like an empty string ... Please try again.");
continue;
}
for (T option : options) {
if (option.toString().toLowerCase().equals(reply)) {
return option;
}
}
callback.say(format("[!] What the heck is '%s'? Please try again?%n", reply));
}
} | java | 15 | 0.552561 | 97 | 34.380952 | 21 | /**
* Asks a user a question and reads their reply.
*
* @return The user's answer.
*/ | function |
func (n NGINXController) IsReloadRequired(data []byte) bool {
in, err := os.Open(cfgPath)
if err != nil {
return false
}
src, err := ioutil.ReadAll(in)
in.Close()
if err != nil {
return false
}
if !bytes.Equal(src, data) {
tmpfile, err := ioutil.TempFile("", "nginx-cfg-diff")
if err != nil {
glog.Errorf("error creating temporal file: %s", err)
return false
}
defer tmpfile.Close()
err = ioutil.WriteFile(tmpfile.Name(), data, 0644)
if err != nil {
return false
}
diffOutput, err := diff(src, data)
if err != nil {
glog.Errorf("error computing diff: %s", err)
return true
}
if glog.V(2) {
glog.Infof("NGINX configuration diff\n")
glog.Infof("%v", string(diffOutput))
}
return len(diffOutput) > 0
}
return false
} | go | 12 | 0.631783 | 61 | 21.794118 | 34 | // IsReloadRequired checks if the new configuration file is different
// from the current one. | function |
ReferenceSet::ReferenceSet(const std::string& filename_)
: filename( filename_ )
, faiFilename( filename + ".fai" )
{
std::cout << "file: " << filename << std::endl;
std::cout << "fai: " << faiFilename << std::endl;
std::fstream in(filename.c_str(), std::ios::binary | std::ios::in);
RecordReader<std::fstream, SinglePass<>> reader(in);
if (read2(ids, seqs, reader, seqan::Fasta()) != 0)
throw std::runtime_error("Invalid Fasta file");
seqCount = length(seqs);
size = 0;
resize(seqs, 2*seqCount, Exact());
for (size_t i = 0; i < seqCount; ++i)
{
TDna rcSeq = seqs[i];
reverseComplement(rcSeq);
seqs[i+seqCount] = rcSeq;
size += 2*length(rcSeq);
}
Records.resize(2*seqCount);
for (size_t i = 0; i < length(seqs); ++i)
{
if (i >= seqCount)
{
Records[i].id = ids[i-seqCount];
Records[i].seq = &seqs[i];
Records[i].orientation = 1;
} else {
Records[i].id = ids[i];
Records[i].seq = &seqs[i];
Records[i].orientation = 0;
}
}
} | c++ | 13 | 0.462198 | 75 | 35.685714 | 35 | // When initialized, read the file into memory | function |
fn _reverse_search(&self, state: &SearchState, goal: char) -> SearchState {
let mut new_state = SearchState {
curr_char: goal,
lo: 0,
hi: 0,
occ_lo: 0,
occ_hi: 0,
did_fail: false
};
// Check if character exists in our transform somewhere
if self.amount_of.get(&goal).is_none() {
new_state.did_fail = true;
return new_state;
}
// Update the lo and hi occurences
new_state.occ_lo = self._rank(goal, state.lo);
new_state.occ_hi = self._rank(goal, state.hi);
// Update lo and hi according to matches
new_state.lo = self.c_table[&goal] + new_state.occ_lo;
new_state.hi = self.c_table[&goal] + new_state.occ_hi;
if new_state.lo >= new_state.hi {
new_state.did_fail = true;
}
new_state
} | rust | 10 | 0.519912 | 75 | 35.2 | 25 | /**
 * Finds the number of occurrences of the character `goal` before state.curr_char,
* and updates the state accordingly
*/ | function |
pub fn exit_code(&self) -> i32 {
match self {
&RedoErrorKind::FailedInAnotherThread { .. } => EXIT_FAILED_IN_ANOTHER_THREAD,
&RedoErrorKind::InvalidTarget(_) => EXIT_INVALID_TARGET,
&RedoErrorKind::CyclicDependency => EXIT_CYCLIC_DEPENDENCY,
&RedoErrorKind::ImmediateExit(code) => code,
_ => EXIT_FAILURE,
}
} | rust | 11 | 0.578406 | 90 | 42.333333 | 9 | /// Returns the exit code for the error kind. | function |
public class ResetBucketProperties extends AsIsRiakCommand<Void, Namespace>
{
private final Namespace namespace;
public ResetBucketProperties(Builder builder)
{
this.namespace = builder.namespace;
}
@Override
protected ResetBucketPropsOperation buildCoreOperation() {
ResetBucketPropsOperation.Builder builder =
new ResetBucketPropsOperation.Builder(namespace);
return builder.build();
}
public static class Builder
{
private final Namespace namespace;
public Builder(Namespace namespace)
{
if (namespace == null)
{
throw new IllegalArgumentException("Namespace cannot be null");
}
this.namespace = namespace;
}
public ResetBucketProperties build()
{
return new ResetBucketProperties(this);
}
}
} | java | 13 | 0.622538 | 79 | 24.416667 | 36 | /**
* Command used to reset the properties of a bucket in Riak.
* <p>
* <pre class="prettyprint">
* {@code
* Namespace ns = new Namespace("my_type", "my_bucket");
* ResetBucketProperties rbp =
* new ResetBucketProperties.Builder(ns)
* .build();
* client.execute(rbp);}</pre>
* </p>
* @author Chris Mancini <cmancini at basho dot com>
* @since 2.0
*/ | class |
static void invert(long[][] A) {
int[] perm = new int[A.length];
for (int k = 0; k < A.length; ++k) {
perm[k] = k;
long[] baseRow = A[k];
for (int c = k; c < A.length; ++c) {
if (baseRow[c] != GF.ZERO) {
perm[k] = c;
break;
}
}
int colIdx = perm[k];
long m = GF.rev(baseRow[colIdx]);
baseRow[colIdx] = baseRow[k];
baseRow[k] = GF.UNIT;
for (int c = 0; c < baseRow.length; ++c) {
baseRow[c] = GF.mul(baseRow[c], m);
}
for (int r = 0; r < A.length; ++r) {
if (r == k) continue;
long[] curRow = A[r];
m = curRow[colIdx];
curRow[colIdx] = curRow[k];
curRow[k] = GF.ZERO;
for (int c = 0; c < curRow.length; ++c) {
curRow[c] = GF.sub(curRow[c], GF.mul(baseRow[c], m));
}
}
}
for (int r = perm.length - 1; r >= 0; --r) {
if (perm[r] != r) {
long[] t = A[r];
A[r] = A[perm[r]];
A[perm[r]] = t;
}
}
} | java | 16 | 0.338316 | 73 | 33.378378 | 37 | /**
* In-place matrix inversion
* @param A input-output matrix
     * @throws IllegalArgumentException for a singular matrix
*/ | function |
@Configuration
public class MySrpingMVCConfig extends WebMvcConfigurerAdapter {
	// Custom interceptor
@Override
public void addInterceptors(InterceptorRegistry registry) {
HandlerInterceptor handlerInterceptor = new HandlerInterceptor() {
@Override
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler)
throws Exception {
System.out.println("自定义拦截器............");
return true;
}
@Override
public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler,
ModelAndView modelAndView) throws Exception {
}
@Override
public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler,
Exception ex) throws Exception {
}
};
registry.addInterceptor(handlerInterceptor).addPathPatterns("/**");
}
	// The second way to customize the message converters
@Override
public void configureMessageConverters(List<HttpMessageConverter<?>> converters) {
StringHttpMessageConverter converter = new StringHttpMessageConverter(Charset.forName("UTF-8"));
converters.add(converter);
}
} | java | 15 | 0.630045 | 113 | 35.189189 | 37 | /**
* Created by Xianda Xu on 2018/1/28.
*/ | class |
func TestSearch(t *testing.T) {
fileName := "pp.txt"
tests := []string{
`discretion`,
`exclamation`,
`want`,
`fortune`,
`objection`,
`name`,
`daughters`,
`pleasure`,
`Mrs.`,
`asd`,
`importance`,
`neighbour`,
}
text, err := StringFromFile(fileName)
if err != nil {
t.Errorf("Test Fail: StringFromFile Error: %s\n", err.Error())
}
words := strings.Fields(text)
streeng := MakeStreeng(words)
for _, test := range tests {
i := 0
for _, word := range streeng.words {
if strings.Compare(test, word) == 0 {
i++
}
}
results := streeng.Search(test)
if len(results) == i {
t.Logf("Test Successful: word: %s", test)
} else {
t.Errorf("Test Fail:\t word: %s \t expected: %d \t result: %d",
test, i, len(results))
}
}
} | go | 12 | 0.582474 | 66 | 19.447368 | 38 | /*
Test functions written for the PRIDE AND PREJUDICE book text.
*/ | function |
def check_floating_round_errors(
some_list: List[Union[List[float], float]]
) -> List[Union[List[float], float]]:
might_as_well_be_zero = (
1e-8
)
res = []
for item in some_list:
if isinstance(item, list):
res.append(check_floating_round_errors(item))
else:
if abs(item) < might_as_well_be_zero:
item = 0.0
res.append(item)
return res | python | 14 | 0.541284 | 57 | 28.133333 | 15 | Check whether there are some float rounding errors
    (checks only numbers close to zero)
:param some_list: Must be a list of either lists or float values
:type some_list: list
| function |
[CakeMethodAlias]
public static void TestUnity3DProject(this ICakeContext context, FilePath projectFolder, Unity3DTestOptions options)
{
var unityTestContext = new Unity3DTestContext(context, projectFolder, options);
unityTestContext.DumpOptions();
unityTestContext.Test();
} | c# | 11 | 0.692537 | 124 | 47 | 7 | /// <summary>
/// Test a provided Unity3D project with the specified test options.
/// </summary>
/// <param name="context">The active cake context.</param>
/// <param name="projectFolder">The absolute path to the Unity3D project to test.</param>
/// <param name="options">The test options to use when testing the project.</param> | function |
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = WebEnvironment.NONE, classes = BaseCassandraTest.Config.class)
@Import(value = { ClientDatastoreCassandra.class, ClientDAO.class, ClientSessionDAO.class, BaseCassandraTest.Config.class, BaseCassandraDataSource.class })
@Ignore("Ignore Cassandra Tests until we can set up a cassandra cluster for the integration testing")
public class ClientDatastoreCassandraTests extends BaseClientDatastoreTest {
@Override
protected List<String> getClientPagination_ExpectedPage3Strings() {
//in cassandra the sort order is weird but consistent - although it may change on the cassandra server restart
return new ArrayList<>(Arrays.asList(new String[]{"qr_10", "qr_13", "qr_8", "qr_18", "qr_42", "qr_30", "qr_35", "qr_28", "qr_14", "qr_22" }));
}
@Override
protected List<String> getClientPagination_ExpectedPage1EmptySortStrings(){
return new ArrayList<>(Arrays.asList(new String[]{"qr_32", "qr_0", "qr_12", "qr_25", "qr_40", "qr_39", "qr_29", "qr_41", "qr_4", "qr_44" }));
}
@Override
protected List<String> getClientPagination_ExpectedPage1SingleSortDescStrings() {
return getClientPagination_ExpectedPage1EmptySortStrings();
}
@Override
protected List<String> getClientSessionPagination_expectedPage1EmptySortStrings(){
return new ArrayList<>(Arrays.asList(new String[]{"qr_12", "qr_47", "qr_4", "qr_1", "qr_21", "qr_42", "qr_45", "qr_35", "qr_14", "qr_0" }));
}
@Override
protected List<String> getClientSessionPagination_expectedPage3Strings(){
return new ArrayList<>(Arrays.asList(new String[]{"qr_28", "qr_32", "qr_6", "qr_19", "qr_49", "qr_31", "qr_33", "qr_25", "qr_7", "qr_40" }));
}
@Override
protected List<String> getClientSessionPagination_expectedPage1SingleSortDescStrings() {
return getClientSessionPagination_expectedPage1EmptySortStrings();
}
} | java | 13 | 0.69223 | 155 | 44.813953 | 43 | /**
* @author dtoptygin
 * NOTE: these tests require a cluster of Cassandra nodes to be up. Need to find a solution for a small in-memory cluster for unit tests.
 * We will not run these tests during the CI/CD build for now; they need to be executed manually.
*/ | class |
public bool GetReadableSPIRV(ShaderFile shaderFile, out List<string> spirvOutput)
{
List<string> output = new List<string>();
spirvOutput = output;
string title = name;
string msg;
if (ReferenceCompiler.Locate() == null)
{
msg = "Could not locate the glslang reference compiler (glslangvalidator.exe) in system path!";
VsShellUtilities.ShowMessageBox(ServiceProvider, msg, title, OLEMSGICON.OLEMSGICON_CRITICAL, OLEMSGBUTTON.OLEMSGBUTTON_OK, OLEMSGDEFBUTTON.OLEMSGDEFBUTTON_FIRST);
OutputWindow.Add(msg);
return false;
}
List<string> validatorOutput;
bool res = ReferenceCompiler.GetHumanReadableSPIRV(shaderFile.fileName, out validatorOutput);
if (res)
{
spirvOutput = validatorOutput;
}
else
{
msg = string.Format(CultureInfo.CurrentCulture, "Could not get human readable SPIR-V for shader \"{0}\" ", shaderFile.fileName) + "\n";
Debug.Write(msg);
VsShellUtilities.ShowMessageBox(ServiceProvider, msg, title, OLEMSGICON.OLEMSGICON_CRITICAL, OLEMSGBUTTON.OLEMSGBUTTON_OK, OLEMSGDEFBUTTON.OLEMSGDEFBUTTON_FIRST);
ParseErrors(validatorOutput, shaderFile);
msg += string.Join("\n", validatorOutput);
OutputWindow.Add(msg);
}
return res;
} | c# | 13 | 0.592763 | 178 | 49.7 | 30 | /// <summary>
        /// Returns the human-readable SPIR-V representation of the shader
/// </summary> | function |
function renderExistingHistory() {
var cities = getLocal();
for (i = 0; i < cities.length; i++) {
addToHistory(cities[i]);
}
if (cities.length === 0) {
currentWeather("Washington DC");
} else {
currentWeather(cities[cities.length -1]);
}
} | javascript | 12 | 0.496933 | 52 | 28.727273 | 11 | //Function to render history list from localstorage | function |
public class RobotParser implements PsiParser {
private static void done(@Nullable PsiBuilder.Marker marker, @NotNull RobotElementType type) {
if (marker != null) {
marker.done(type);
}
}
private static void parseFileTopLevel(@NotNull PsiBuilder builder) {
while (!builder.eof()) {
IElementType tokenType = builder.getTokenType();
if (RobotTokenTypes.HEADING == tokenType) {
parseHeading(builder);
} else {
builder.advanceLexer();
}
}
}
private static void parseHeading(@NotNull PsiBuilder builder) {
assert RobotTokenTypes.HEADING == builder.getTokenType();
PsiBuilder.Marker headingMarker = null;
while (true) {
IElementType type = builder.getTokenType();
if (RobotTokenTypes.HEADING == type) {
done(headingMarker, RobotTokenTypes.HEADING);
headingMarker = builder.mark();
builder.advanceLexer();
}
if (builder.eof()) {
done(headingMarker, RobotTokenTypes.HEADING);
break;
} else {
type = builder.getTokenType();
if (RobotTokenTypes.HEADING == type) {
//noinspection UnnecessaryContinue
continue;
} else if (RobotTokenTypes.IMPORT == type) {
parseImport(builder);
} else if (RobotTokenTypes.VARIABLE_DEFINITION == type && isNextToken(builder, RobotTokenTypes.WHITESPACE)) {
parseVariableDefinition(builder);
} else if (RobotTokenTypes.SETTING == type) {
parseSetting(builder);
} else if (RobotTokenTypes.KEYWORD_DEFINITION == type ||
RobotTokenTypes.VARIABLE_DEFINITION == type && isNextToken(builder, RobotTokenTypes.KEYWORD_DEFINITION)) {
parseKeywordDefinition(builder);
} else if (RobotTokenTypes.KEYWORD == type) {
parseKeywordStatement(builder, RobotTokenTypes.KEYWORD_STATEMENT, false);
} else {
// other types; error
//System.out.println(type);
builder.advanceLexer();
}
}
}
}
private static void parseKeywordDefinition(@NotNull PsiBuilder builder) {
PsiBuilder.Marker keywordMarker = null;
PsiBuilder.Marker keywordIdMarker = null;
while (true) {
IElementType type = builder.getTokenType();
if (RobotTokenTypes.KEYWORD_DEFINITION == type ||
RobotTokenTypes.VARIABLE_DEFINITION == type && isNextToken(builder, RobotTokenTypes.KEYWORD_DEFINITION)) {
if (builder.rawLookup(-1) != RobotTokenTypes.VARIABLE_DEFINITION) {
done(keywordIdMarker, RobotTokenTypes.KEYWORD_DEFINITION_ID);
done(keywordMarker, RobotTokenTypes.KEYWORD_DEFINITION);
keywordMarker = builder.mark();
keywordIdMarker = builder.mark();
}
if (RobotTokenTypes.KEYWORD_DEFINITION == type) {
builder.advanceLexer();
}
}
if (builder.eof()) {
done(keywordIdMarker, RobotTokenTypes.KEYWORD_DEFINITION_ID);
done(keywordMarker, RobotTokenTypes.KEYWORD_DEFINITION);
break;
} else {
type = builder.getTokenType();
// not all the time; all cases but VAR_DEF (when in keyword definition only)
if (RobotTokenTypes.HEADING == type) {
done(keywordIdMarker, RobotTokenTypes.KEYWORD_DEFINITION_ID);
done(keywordMarker, RobotTokenTypes.KEYWORD_DEFINITION);
break;
} else if (RobotTokenTypes.BRACKET_SETTING == type) {
done(keywordIdMarker, RobotTokenTypes.KEYWORD_DEFINITION_ID);
keywordIdMarker = null;
parseBracketSetting(builder);
} else if (RobotTokenTypes.ERROR == type) {
// not sure
builder.advanceLexer();
} else if (RobotTokenTypes.VARIABLE_DEFINITION == type) {
PsiBuilder.Marker statement = parseKeywordStatement(builder, RobotTokenTypes.VARIABLE_DEFINITION, true);
if (statement != null && keywordIdMarker != null) {
keywordIdMarker.doneBefore(RobotTokenTypes.KEYWORD_DEFINITION_ID, statement);
keywordIdMarker = null;
}
} else {
done(keywordIdMarker, RobotTokenTypes.KEYWORD_DEFINITION_ID);
keywordIdMarker = null;
parseKeywordStatement(builder, RobotTokenTypes.KEYWORD_STATEMENT, false);
}
}
}
}
private static PsiBuilder.Marker parseKeywordStatement(@NotNull PsiBuilder builder, @NotNull IElementType rootType, boolean skipGherkin) {
PsiBuilder.Marker keywordStatementMarker = builder.mark();
boolean seenGherkin = skipGherkin;
boolean seenKeyword = false;
boolean inline = false;
while (!builder.eof()) {
IElementType type = builder.getTokenType();
if (type == RobotTokenTypes.GHERKIN) {
// if we see a keyword or variable there should be no Gherkin unless we are on a new statement
if (seenGherkin || seenKeyword) {
break;
} else {
seenGherkin = true;
// nothing to do for this
builder.advanceLexer();
}
} else if (type == RobotTokenTypes.KEYWORD ||
(type == RobotTokenTypes.VARIABLE && isNextToken(builder, RobotTokenTypes.KEYWORD))) {
if (seenKeyword) {
break;
} else {
seenKeyword = true;
parseKeyword(builder);
}
} else if ((type == RobotTokenTypes.ARGUMENT || type == RobotTokenTypes.VARIABLE) &&
builder.rawLookup(1) != RobotTokenTypes.KEYWORD) {
parseWith(builder, RobotTokenTypes.ARGUMENT);
} else if (type == RobotTokenTypes.VARIABLE_DEFINITION) {
if (seenKeyword) {
break;
} else {
seenKeyword = true;
boolean isPartOfKeywordDefinition = builder.rawLookup(-1) == RobotTokenTypes.KEYWORD_DEFINITION ||
isNextToken(builder, RobotTokenTypes.KEYWORD_DEFINITION);
PsiBuilder.Marker id = builder.mark();
builder.advanceLexer();
done(id, RobotTokenTypes.VARIABLE_DEFINITION_ID);
inline = isPartOfKeywordDefinition;
if (!isPartOfKeywordDefinition && builder.getTokenType() == RobotTokenTypes.KEYWORD) {
parseKeywordStatement(builder, RobotTokenTypes.KEYWORD_STATEMENT, true);
}
}
} else {
// other types; error?
//System.out.println(type);
break;
}
}
keywordStatementMarker.done(rootType);
return inline ? null : keywordStatementMarker;
}
/**
* Checks to see if the next token in the builder is the given token. In the case that the given token
* is whitespace then we also allow for EOF.
*
* @param builder the spi builder that we are parsing.
* @param type the element type to check for.
* @return true if the next element type matches the given or the given is whitespace and we are at EOF.
*/
private static boolean isNextToken(@NotNull PsiBuilder builder, IElementType type) {
boolean allowEof = type == RobotTokenTypes.WHITESPACE;
IElementType next = builder.rawLookup(1);
return next == type ||
allowEof && next == null;
}
private static void parseKeyword(@NotNull PsiBuilder builder) {
parseWith(builder, RobotTokenTypes.KEYWORD);
}
private static void parseBracketSetting(@NotNull PsiBuilder builder) {
parseWithArguments(builder, RobotTokenTypes.BRACKET_SETTING);
}
private static void parseImport(@NotNull PsiBuilder builder) {
parseWithArguments(builder, RobotTokenTypes.IMPORT);
}
private static void parseVariableDefinition(@NotNull PsiBuilder builder) {
parseWithArguments(builder, RobotTokenTypes.VARIABLE_DEFINITION);
}
private static void parseVariableDefinitionWithDefaults(@NotNull PsiBuilder builder) {
IElementType type = builder.getTokenType();
assert RobotTokenTypes.VARIABLE_DEFINITION == type;
PsiBuilder.Marker argMarker = builder.mark();
PsiBuilder.Marker definitionMarker = builder.mark();
PsiBuilder.Marker definitionIdMarker = builder.mark();
builder.advanceLexer();
definitionIdMarker.done(RobotTokenTypes.VARIABLE_DEFINITION_ID);
definitionMarker.done(RobotTokenTypes.VARIABLE_DEFINITION);
IElementType token = builder.getTokenType();
while (!builder.eof() && (token == RobotTokenTypes.ARGUMENT || token == RobotTokenTypes.VARIABLE)) {
PsiBuilder.Marker variableMarker = null;
if (token == RobotTokenTypes.VARIABLE) {
variableMarker = builder.mark();
}
builder.advanceLexer();
if (token == RobotTokenTypes.VARIABLE) {
done(variableMarker, RobotTokenTypes.VARIABLE);
}
token = builder.getTokenType();
}
argMarker.done(RobotTokenTypes.ARGUMENT);
}
private static void parseSetting(@NotNull PsiBuilder builder) {
parseWithArguments(builder, RobotTokenTypes.SETTING);
}
private static void parseWithArguments(@NotNull PsiBuilder builder, @NotNull IElementType markType) {
IElementType type = builder.getTokenType();
assert markType == type;
PsiBuilder.Marker importMarker = builder.mark();
PsiBuilder.Marker id = null;
if (type == RobotTokenTypes.VARIABLE_DEFINITION) {
id = builder.mark();
}
builder.advanceLexer();
if (id != null) {
id.done(RobotTokenTypes.VARIABLE_DEFINITION_ID);
}
while (!builder.eof()) {
type = builder.getTokenType();
if (RobotTokenTypes.ARGUMENT == type || RobotTokenTypes.VARIABLE == type) {
parseWith(builder, RobotTokenTypes.ARGUMENT);
} else if (markType != RobotTokenTypes.VARIABLE_DEFINITION && RobotTokenTypes.VARIABLE_DEFINITION == type) {
// we check the first two to see if we are in a new statement; the third handles ... cases
if (builder.rawLookup(-1) == RobotTokenTypes.WHITESPACE &&
builder.rawLookup(-2) == RobotTokenTypes.WHITESPACE &&
builder.rawLookup(-3) != RobotTokenTypes.WHITESPACE) {
break;
}
parseVariableDefinitionWithDefaults(builder);
} else {
break;
}
}
importMarker.done(markType);
}
private static void parseWith(@NotNull PsiBuilder builder, @NotNull IElementType type) {
PsiBuilder.Marker arg = builder.mark();
IElementType current = builder.getTokenType();
while (!builder.eof() && current != null && (type == current || RobotTokenTypes.VARIABLE == current || RobotTokenTypes.VARIABLE_DEFINITION == current)) {
boolean end = isNextToken(builder, RobotTokenTypes.WHITESPACE);
if (RobotTokenTypes.VARIABLE == current || RobotTokenTypes.VARIABLE_DEFINITION == current) {
parseSimple(builder, current);
} else {
builder.advanceLexer();
}
if (end) {
break;
}
current = builder.getTokenType();
}
arg.done(type);
}
private static void parseSimple(@NotNull PsiBuilder builder, @NotNull IElementType type) {
assert builder.getTokenType() == type;
PsiBuilder.Marker argumentMarker = builder.mark();
builder.advanceLexer();
argumentMarker.done(type);
}
@NotNull
@Override
public ASTNode parse(@NotNull IElementType root, @NotNull PsiBuilder builder) {
final PsiBuilder.Marker marker = builder.mark();
parseFileTopLevel(builder);
marker.done(RobotTokenTypes.FILE);
return builder.getTreeBuilt();
}
} | java | 21 | 0.578895 | 161 | 44.205575 | 287 | /**
* @author Stephen Abrams
*/ | class |
def justify(lines, width):
for line in lines:
extra_spaces = width - length_of_line(line)
spaces = list(' ' * extra_spaces)
inx = 0
while spaces:
if inx >= len(line) - 1:
inx = 0
space = spaces.pop()
line[inx] += space
inx += 1
return lines | python | 12 | 0.465116 | 51 | 27.75 | 12 |
Justify text on lines with aligned left and right edges
Uses a list of extra spaces that must be interspersed
between the words
| function |
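A quick worked illustration of the loop above (assuming length_of_line counts the characters plus single separating spaces): for the line ["the", "quick", "fox"] and width 15 there are 2 extra spaces; they are appended round-robin to the non-final words, so "the" and "quick" each receive one extra trailing space and the joined line ends exactly on column 15.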
@SuppressWarnings("unchecked")
public static boolean checkQueryExpression( String checkType, Object data,
Map<String, Query[]> queryDefinitionsMap,
ExtendedItemHandle itemHandle, ReportDataServiceProvider provider )
{
if ( data == null || "".equals( data ) )
{
return true;
}
ExpressionCodec exprCodec = ChartModelHelper.instance( )
.createExpressionCodec( );
String categoryDimension = null;
String yOptionDimension = null;
String categoryBindName = null;
String yOptionBindName = null;
String expression = (String) data;
Iterator<ComputedColumnHandle> columnBindings = null;
if ( ChartReportItemHelper.instance( )
.getBindingCubeHandle( itemHandle ) != null
&& provider.isInheritanceOnly( )
|| provider.isSharedBinding( ) )
{
ReportItemHandle reportItemHandle = provider.getReportItemHandle( );
columnBindings = reportItemHandle.getColumnBindings( ).iterator( );
}
else if ( ChartReportItemHelper.instance( )
.getBindingCubeHandle( itemHandle ) != null
|| ( provider.isInXTabMeasureCell( ) && !provider.isPartChart( ) ) )
{
columnBindings = getAllColumnBindingsIterator( itemHandle );
}
if ( ChartUIConstants.QUERY_OPTIONAL.equals( checkType ) )
{
String categoryExpr = null;
Query[] querys = queryDefinitionsMap.get( ChartUIConstants.QUERY_CATEGORY );
if ( querys != null && querys.length > 0 )
{
categoryExpr = querys[0].getDefinition( );
}
if ( categoryExpr == null || "".equals( categoryExpr ) )
{
return true;
}
categoryBindName = exprCodec.getCubeBindingName( categoryExpr, true );
yOptionBindName = exprCodec.getCubeBindingName( expression, true );
}
else if ( ChartUIConstants.QUERY_CATEGORY.equals( checkType ) )
{
String yOptionExpr = null;
Query[] querys = queryDefinitionsMap.get( ChartUIConstants.QUERY_OPTIONAL );
if ( querys != null && querys.length > 0 )
{
yOptionExpr = querys[0].getDefinition( );
}
if ( yOptionExpr == null || "".equals( yOptionExpr ) )
{
return true;
}
categoryBindName = exprCodec.getCubeBindingName( expression, true );
yOptionBindName = exprCodec.getCubeBindingName( yOptionExpr, true );
}
if ( columnBindings == null )
{
return true;
}
while ( columnBindings.hasNext( ) )
{
ComputedColumnHandle columnHandle = columnBindings.next( );
ChartItemUtil.loadExpression( exprCodec, columnHandle );
String bindName = columnHandle.getName( );
if ( !exprCodec.isDimensionExpresion( ) )
{
continue;
}
if ( bindName.equals( categoryBindName ) )
{
categoryDimension = exprCodec.getLevelNames( )[0];
}
if ( bindName.equals( yOptionBindName ) )
{
yOptionDimension = exprCodec.getLevelNames( )[0];
}
}
if ( ( categoryDimension != null && yOptionDimension != null && categoryDimension.equals( yOptionDimension ) ) )
{
return false;
}
else
{
return true;
}
} | java | 12 | 0.689632 | 114 | 30.880435 | 92 | /**
 * Check if the expressions of category and Y optional have the same dimension.
*
* @param checkType
* @param data
* @param queryDefinitionsMap
* @param itemHandle
* @param provider
*
* @since 2.5.1
*/ | function |
public class EmbeddedIdWithInheritanceTest extends CompilationTest {
@Test
@WithClasses({ Ref.class, AbstractRef.class, TestEntity.class })
@WithMappingFiles("orm.xml")
public void testEntityContainsEmbeddedIdProperty() {
assertMetamodelClassGeneratedFor( TestEntity.class );
assertPresenceOfFieldInMetamodelFor(
TestEntity.class, "ref", "Property ref should be in metamodel"
);
}
} | java | 9 | 0.790932 | 68 | 35.181818 | 11 | /**
* @author Hardy Ferentschik
*/ | class |
func (s *WorkItem2Suite) TestCreateAndUpdateWorkItemForEveryWIT() {
spaceTemplateRepo := spacetemplate.NewRepository(s.DB)
templates, err := spaceTemplateRepo.List(s.Ctx)
require.NoError(s.T(), err)
for _, templ := range templates {
s.T().Run(templ.Name, func(t *testing.T) {
if !templ.CanConstruct {
t.Skipf("skipping space template \"%s\" because it is marked as: \"cannot construct spaces\"", templ.Name)
}
witRepo := workitem.NewWorkItemTypeRepository(s.DB)
fxt := tf.NewTestFixture(s.T(), s.DB,
tf.CreateWorkItemEnvironment(),
tf.Spaces(1, func(fxt *tf.TestFixture, idx int) error {
fxt.Spaces[idx].SpaceTemplateID = templ.ID
return nil
}),
)
wits, err := witRepo.List(s.Ctx, fxt.Spaces[0].SpaceTemplateID)
require.NoError(t, err)
for _, wit := range wits {
t.Run(wit.Name, func(t *testing.T) {
if !wit.CanConstruct {
t.Skipf("skipping WIT \"%s\" because it is marked as: \" cannot construct work items\"", wit.Name)
}
var id uuid.UUID
c := minimumRequiredCreateWithType(wit.ID)
c.Data.Attributes[workitem.SystemTitle] = "WI of type " + wit.Name
stateDef, ok := wit.Fields[workitem.SystemState]
require.True(t, ok, "failed to get state definition from %+v", spew.Sdump(wit))
stateEnum, ok := stateDef.Type.(workitem.EnumType)
require.True(t, ok, "failed to get state enum from field definition")
require.NotEmpty(t, stateEnum.Values)
initialState, ok := stateEnum.Values[0].(string)
require.True(t, ok, "failed to get values from state enum")
c.Data.Relationships.Space.Data.ID = &fxt.Spaces[0].ID
_, item := test.CreateWorkitemsCreated(t, s.svc.Context, s.svc, s.workitemsCtrl, fxt.Spaces[0].ID, &c)
require.NotNil(t, item)
require.Equal(t, initialState, item.Data.Attributes[workitem.SystemState])
require.NotNil(t, item.Data.Relationships)
require.NotNil(t, item.Data.Relationships.BaseType)
require.NotNil(t, item.Data.Relationships.BaseType.Data)
require.Equal(t, wit.ID, item.Data.Relationships.BaseType.Data.ID)
id = *item.Data.ID
updatePayload := minimumRequiredUpdatePayload()
updatePayload.Data.ID = &id
updatePayload.Data.Attributes = item.Data.Attributes
updatePayload.Data.Attributes[workitem.SystemTitle] = "NEW TITLE"
_, updated := test.UpdateWorkitemOK(s.T(), s.svc.Context, s.svc, s.workitemCtrl, id, &updatePayload)
require.NotNil(t, updated)
})
}
})
}
} | go | 25 | 0.684486 | 110 | 45.092593 | 54 | // TestCreateAndUpdateWorkItemForEveryWIT does this:
//
// For each space template that can construct spaces, create a space based off
// of that template. Then construct a work item of each work item type that can
// construct work items. Then instantiate a work item with only the title set in
// the payload (this is all the UI can do when creating a work item with the
// quick add). Then check that the created work item has the individual initial
// state as it was defined for the work item type of that work item. | function |
public final <T> T shallowCopy(T obj) {
long size = shallowSizeOf(obj);
long address = THE_UNSAFE.allocateMemory(size);
long start = toAddress(obj);
THE_UNSAFE.copyMemory(start, address, size);
@SuppressWarnings("unchecked")
final T result = (T) fromAddress(address);
return result;
} | java | 8 | 0.715719 | 49 | 32.333333 | 9 | /**
* Performs a shallow copy of the given object - a new instance is allocated with the same contents. Any object
* references inside the copy will be the same as the original object.
* @param obj Object to copy
* @param <T> The type being copied
* @return A new instance, identical to the original
*/ | function |
public boolean addNeighbours(Map map, HashMap<Integer, Country> countries,
HashMap<Integer, ArrayList<Integer>> borders, String country_name, String neighbour_country_name)
throws ValidMapException {
boolean country_flag = false;
boolean neighbour_flag = false;
int country_id = 0, neighbour_country_id = 0;
for (int n : countries.keySet()) {
String coun = countries.get(n).getCountryName();
if (country_name.equalsIgnoreCase(coun)) {
country_id = n;
country_flag = true;
break;
}
}
if (country_flag) {
for (int p : countries.keySet()) {
String neigh_country = countries.get(p).getCountryName();
if (neighbour_country_name.equalsIgnoreCase(neigh_country)) {
neighbour_country_id = p;
neighbour_flag = true;
break;
}
}
if (neighbour_flag) {
if (borders.size() > 0) {
if (borders.containsKey(country_id)) {
if (borders.get(country_id).contains(neighbour_country_id)) {
throw new ValidMapException("The neighbour country already exists in Borders List!");
} else {
borders.get(country_id).add(neighbour_country_id);
}
} else {
ArrayList<Integer> list = new ArrayList<Integer>();
list.add(neighbour_country_id);
borders.put(country_id, list);
}
} else {
ArrayList<Integer> list1 = new ArrayList<Integer>();
list1.add(neighbour_country_id);
borders.put(country_id, list1);
}
return true;
} else {
throw new ValidMapException("The neighbour country named " + neighbour_country_name
+ " does not exist. Please add the country and then try to add the neighbour");
}
} else {
throw new ValidMapException(
"The Country " + country_name + " does not exists. Please add the country and then neighbour");
}
} | java | 17 | 0.656181 | 100 | 34.54902 | 51 | /**
* This method adds borders to map
*
* @param map the map object
* @param borders the borders hashmap
* @param country_name the name of the country
* @param neighbour_country_name the name of the neighbour country
* @param countries for country
* @return true if border added, else false
* @throws ValidMapException handles exceptions
*/ | function |
func (l *LinkedList) Print() string {
v := ""
currentNode := l.Head
for currentNode != nil {
v = fmt.Sprintf("%v%v ", v, currentNode.Value)
currentNode = currentNode.Next
}
v = fmt.Sprintf("[%v]", strings.TrimSpace(v))
return v
} | go | 10 | 0.635983 | 48 | 23 | 10 | // Print returns the human-readable format of the linked list | function |
func (g *Geometry) UnmarshalJSON(b []byte) error {
var r rawGeometry
err := json.Unmarshal(b, &r)
if err != nil {
return err
}
g.Object = r.Object
g.rawGeometry = r
return g.setGeometry()
} | go | 8 | 0.661616 | 50 | 18.9 | 10 | // UnmarshalJSON will take a geometry GeoJSON string and appropriately fill in the
// specific geometry type | function |
private void HDisplayControl_Resize(object sender, EventArgs e)
{
UpdateHalconWindowExtents();
vScrollBar1.Location = new Point((viewPort.Location.X +
windowExtents.Width),
viewPort.Location.Y);
if (hWndControl.adaptSize)
{
if (this.Image != null)
hWndControl.resetImagePart(imageWidth, imageHeight);
}
else
{
if (this.Image != null)
setFullImageSize();
}
this.Invalidate();
} | c# | 13 | 0.470779 | 69 | 33.277778 | 18 | /// <summary>
/// Event handling for resizing the graphic window
/// </summary> | function |
void configureInterrupts() {
RCONbits.IPEN = 0;
INTCONbits.GIE = 1;
INTCONbits.PEIE = 1;
configureInput();
configureTimer();
} | c | 6 | 0.624161 | 28 | 20.428571 | 7 | /**
* Configure all the interrupts on this chip as needed by this application.
*/ | function |
@Override
public Parse filter(String url, WebPage page, Parse parse,
HTMLMetaTags metaTags, DocumentFragment doc) {
URL base;
try {
base = new URL(page.getBaseUrl().toString());
Walker.walk(doc, base, page, getConf());
} catch (Exception e) {
LOG.error("Error parsing " + url, e);
return ParseStatusUtils.getEmptyParse(e, getConf());
}
return parse;
} | java | 12 | 0.634568 | 60 | 30.230769 | 13 | /**
* Adds metadata or otherwise modifies a parse of an HTML document, given the
* DOM tree of a page.
*/ | function |
internal static ProblemResponse Map(Problem problem)
{
if (problem is null)
{
throw new ArgumentNullException(nameof(problem));
}
var holds = problem.ProblemHolds
.OrderBy(problemHold => problemHold.Position)
.Select(problemHold => new HoldOnProblemResponse(problemHold.Hold.Id, problemHold.Hold.Name,
problemHold.Hold.ParentHoldId, problemHold.IsStandingStartHold,
CreateHoldRuleResponse(problemHold)))
.ToList();
return new ProblemResponse(problem.Id, problem.Name, holds)
{
Description = problem.Description,
SetBy = problem.SetBy,
DateSet = problem.DateSet,
FirstAscent = problem.FirstAscent,
TechGrade = problem.TechGradeId is null ? null : Map(problem.TechGrade),
BGrade = problem.BGradeId is null ? null : Map(problem.BGrade),
PoveyGrade = problem.PoveyGradeId is null ? null : Map(problem.PoveyGrade),
FurlongGrade = problem.FurlongGradeId is null ? null : Map(problem.FurlongGrade),
Rules = problem.ProblemRules.Select(Map),
StyleSymbols = problem.ProblemStyleSymbols
.Select(problemStyleSymbol => new StyleSymbolResponse(problemStyleSymbol.StyleSymbolId,
problemStyleSymbol.StyleSymbol.Name,
problemStyleSymbol.StyleSymbol.Description))
};
} | c# | 20 | 0.526821 | 131 | 60.103448 | 29 | /// <summary>
/// Create a ProblemResponse DTO entity from a Problem entity
/// </summary>
/// <param name="problem">The entity to map from</param>
/// <returns>The corresponding DTO</returns>
/// <exception cref="ArgumentNullException"></exception> | function |
function handlePolygon(coords, polygons, indexMap, coordLength) {
polygons.polygonIndices[indexMap.polygonObject] = indexMap.polygonPosition;
indexMap.polygonObject++;
for (const ring of coords) {
polygons.primitivePolygonIndices[indexMap.polygonRing] = indexMap.polygonPosition;
indexMap.polygonRing++;
polygons.positions.set(flatten(ring), indexMap.polygonPosition * coordLength);
const nPositions = ring.length;
polygons.objectIds.set(
new Uint32Array(nPositions).fill(indexMap.feature),
indexMap.polygonPosition
);
indexMap.polygonPosition += nPositions;
}
} | javascript | 12 | 0.757377 | 86 | 39.733333 | 15 | // Fills Polygon coordinates into polygons object of arrays | function |