hexsha (stringlengths 40-40) | size (int64 140-1.03M) | ext (stringclasses 94 values) | lang (stringclasses 21 values) | max_stars_repo_path (stringlengths 3-663) | max_stars_repo_name (stringlengths 4-120) | max_stars_repo_head_hexsha (stringlengths 40-78) | max_stars_repo_licenses (sequencelengths 1-10) | max_stars_count (int64 1-368k ⌀) | max_stars_repo_stars_event_min_datetime (stringlengths 24-24 ⌀) | max_stars_repo_stars_event_max_datetime (stringlengths 24-24 ⌀) | max_issues_repo_path (stringlengths 3-663) | max_issues_repo_name (stringlengths 4-120) | max_issues_repo_head_hexsha (stringlengths 40-78) | max_issues_repo_licenses (sequencelengths 1-10) | max_issues_count (int64 1-116k ⌀) | max_issues_repo_issues_event_min_datetime (stringlengths 24-24 ⌀) | max_issues_repo_issues_event_max_datetime (stringlengths 24-24 ⌀) | max_forks_repo_path (stringlengths 3-663) | max_forks_repo_name (stringlengths 4-135) | max_forks_repo_head_hexsha (stringlengths 40-78) | max_forks_repo_licenses (sequencelengths 1-10) | max_forks_count (int64 1-105k ⌀) | max_forks_repo_forks_event_min_datetime (stringlengths 24-24 ⌀) | max_forks_repo_forks_event_max_datetime (stringlengths 24-24 ⌀) | content (stringlengths 140-1.03M) | avg_line_length (float64 2.32-23.1k) | max_line_length (int64 11-938k) | alphanum_fraction (float64 0.01-1) | score (float32 3-4.25) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0c865092d1b024900a20974bf4805c2c027168ad | 1,337 | asm | Assembly | src/primes.asm | rprouse/8088 | 7cba221d5dd515144afa0d7bdd875f914e0e8c9a | [
"MIT"
] | null | null | null | src/primes.asm | rprouse/8088 | 7cba221d5dd515144afa0d7bdd875f914e0e8c9a | [
"MIT"
] | null | null | null | src/primes.asm | rprouse/8088 | 7cba221d5dd515144afa0d7bdd875f914e0e8c9a | [
"MIT"
] | null | null | null | ; Calculate primes using the Sieve of Eratosthenes
cpu 8086
bits 16
org 0x0100
table: equ 0x8000
table_size: equ 1000
jmp start
%include 'library.inc'
start:
mov bx,table
mov cx,table_size
mov al,0
; Initialize the memory in the table to zero
.zero_loop:
mov [bx],al ; Write AL to the address pointed to by BX
inc bx
loop .zero_loop ; Decrease CX and jump if non-zero
mov ax,2 ; Start at first prime, 2. AX is the prime we are testing
.check_prime:
mov bx,table ; Set BX to the table address
add bx,ax ; Add the last prime to BX
cmp byte [bx],0 ; Is it a prime? If it is still 0, we haven't marked it as a multiple
jne .next
push ax ; This is a prime, display it
call display_number
mov al,','
call chout
pop ax
mov bx,table
add bx,ax
.mark_multiples:
add bx,ax ; Next multiple of AX
cmp bx,table+table_size
jg .next ; Make sure we're not at the end of the table
mov byte [bx],1 ; Set the value as not-prime in the table
jmp .mark_multiples ; Back and multiply again
.next:
inc ax ; Increment AX to the next number to check
cmp ax,table_size ; Make sure we are not at the end
jne .check_prime
jmp exit | 25.711538 | 93 | 0.615557 | 3.28125 |
284b2688717d354ae6e7444f223b3fae0698eee2 | 1,513 | rb | Ruby | lib/cute_print/formatter.rb | wconrad/cute_print | 9df8f056579324d329030ef0bd6621b3f0fa2aa8 | [
"MIT"
] | 2 | 2015-06-19T17:31:24.000Z | 2017-09-27T19:44:50.000Z | lib/cute_print/formatter.rb | wconrad/cute_print | 9df8f056579324d329030ef0bd6621b3f0fa2aa8 | [
"MIT"
] | 5 | 2015-02-25T20:51:57.000Z | 2018-03-13T19:29:21.000Z | lib/cute_print/formatter.rb | wconrad/cute_print | 9df8f056579324d329030ef0bd6621b3f0fa2aa8 | [
"MIT"
] | null | null | null | require "pp"
require "stringio"
require_relative "format"
require_relative "labeler"
require_relative "location"
require_relative "location_label"
require_relative "source_label"
require_relative "values"
module CutePrint
# @api private
class Formatter
def initialize(opts = {})
@method = opts.fetch(:method)
@out = opts.fetch(:out)
@block = opts.fetch(:block, nil)
@args = opts.fetch(:values, [])
@values = Values.new(@args, @block)
@width = opts.fetch(:width)
@location_label = nil
end
def write
if @values.empty? && !label.empty?
write_line label.chomp(": ")
else
@values.each do |value|
labeler = Labeler.new(@format, @width, label, value)
write_lines labeler.labeled
end
end
end
def with_location(format_key)
location = Location.find
@location_label = LocationLabel.make(format_key, location)
end
def inspect
@format = Format::Inspect.new
end
def pretty_print
@format = Format::PrettyPrint.new
end
private
def write_lines(lines)
lines.each do |line|
write_line line
end
end
def write_line(line)
line += "\n" unless line =~ /\n\Z/
@out.print line
end
def label
@label ||= make_label
end
def make_label
[
(@location_label.to_s if @location_label),
(SourceLabel.new(@block, @method) if @block),
].compact.join
end
end
end
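# Rough usage sketch (illustrative only; Formatter is marked @api private and is
# normally driven by CutePrint's public helpers rather than constructed directly):
#
#   formatter = CutePrint::Formatter.new(method: :q, out: $stderr, values: [object], width: 79)
#   formatter.inspect   # select Kernel#inspect formatting
#   formatter.write     # writes the labeled value(s) to out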
| 20.173333 | 64 | 0.611368 | 3.015625 |
0ce5d95f10a05417cb3b6fc154c24d7adc27cf45 | 1,877 | py | Python | scripts/baxter_find_tf.py | mkrizmancic/qlearn_baxter | 0498315212cacb40334cbb97a858c6ba317f52a3 | [
"MIT"
] | 4 | 2017-11-11T18:16:22.000Z | 2018-11-08T13:31:09.000Z | scripts/baxter_find_tf.py | mkrizmancic/qlearn_baxter | 0498315212cacb40334cbb97a858c6ba317f52a3 | [
"MIT"
] | null | null | null | scripts/baxter_find_tf.py | mkrizmancic/qlearn_baxter | 0498315212cacb40334cbb97a858c6ba317f52a3 | [
"MIT"
] | 2 | 2019-09-04T12:28:58.000Z | 2021-09-27T13:02:48.000Z | #!/usr/bin/env python
"""Calculate transformation matrices and broadcast transform from robot's base to head markers."""
import rospy
import tf
import math
from PyKDL import Vector, Frame, Rotation
if __name__ == '__main__':
rospy.init_node('baxter_find_transformation')
listener = tf.TransformListener()
br = tf.TransformBroadcaster()
rate = rospy.Rate(50)
while not rospy.is_shutdown():
try:
(trans_OH, rot_OH) = listener.lookupTransform('/optitrack', '/bax_head', rospy.Time(0))
(trans_OA, rot_OA) = listener.lookupTransform('/optitrack', '/bax_arm', rospy.Time(0))
(trans_BG, rot_BG) = listener.lookupTransform('/base', '/left_gripper_base', rospy.Time(0))
except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
continue
# Rotations
rot_OH = Rotation.Quaternion(*rot_OH)
rot_OA = Rotation.Quaternion(*rot_OA)
rot_BG = Rotation.Quaternion(*rot_BG)
rot_AG = Rotation.RPY(math.pi / 2, -math.pi, math.pi / 2)
# Creating Frames
T_OH = Frame(rot_OH, Vector(*trans_OH))
T_OA = Frame(rot_OA, Vector(*trans_OA))
T_BG = Frame(rot_BG, Vector(*trans_BG))
T_AG = Frame(rot_AG, Vector(0, 0, 0))
# Finding right transformation
T_HB = T_OH.Inverse() * T_OA * T_AG * T_BG.Inverse()
T_empty_p = Vector(0, 0, 0)
T_empty_Q = Rotation.Quaternion(0, 0, 0, 1)
T_empty = Frame(T_empty_Q, T_empty_p)
# Broadcast new transformations
br.sendTransform(T_HB.p, T_HB.M.GetQuaternion(), rospy.Time.now(), 'base', 'bax_head')
br.sendTransform(T_HB.p, T_HB.M.GetQuaternion(), rospy.Time.now(), 'reference/base', 'bax_head')
br.sendTransform(T_empty.p, T_empty.M.GetQuaternion(), rospy.Time.now(), 'world', 'base')
rate.sleep()
| 39.93617 | 104 | 0.64731 | 3.140625 |
ade1192e66419a4f1a0f70babfae972e654e2cc0 | 13,712 | lua | Lua | 3DreamEngine/loader/dae.lua | sewbacca/3DreamEngine | d688b7d04fd7ffdbedaa55b0d26785e78304bbca | [
"MIT"
] | 209 | 2019-04-01T20:58:05.000Z | 2022-03-30T20:02:26.000Z | 3DreamEngine/loader/dae.lua | sewbacca/3DreamEngine | d688b7d04fd7ffdbedaa55b0d26785e78304bbca | [
"MIT"
] | 54 | 2019-03-30T23:58:34.000Z | 2022-02-01T14:20:57.000Z | 3DreamEngine/loader/dae.lua | sewbacca/3DreamEngine | d688b7d04fd7ffdbedaa55b0d26785e78304bbca | [
"MIT"
] | 12 | 2019-03-31T09:50:25.000Z | 2022-03-03T09:52:04.000Z | --[[
#dae - COLLADA
--]]
--load space separated arrays as floats or as strings
local function loadFloatArray(arr)
local t = { }
for w in arr:gmatch("%S+") do
t[#t+1] = tonumber(w)
end
return t
end
local function loadArray(arr)
local t = { }
for w in arr:gmatch("%S+") do
t[#t+1] = w
end
return t
end
--load entire tree and index all IDs
local indices
local localToGlobal
local function indexTree(node)
for key,child in pairs(node) do
if type(child) == "table" and key ~= "_attr" then
indexTree(child)
end
end
if node._attr and node._attr.id then
indices[node._attr.id] = node
indices["#" .. node._attr.id] = node
if node._attr.sid then
localToGlobal[node._attr.sid] = node._attr.id
end
end
end
return function(self, obj, path)
local xml2lua = require(self.root .. "/libs/xml2lua/xml2lua")
local handler = require(self.root .. "/libs/xml2lua/tree"):new()
--parse
local file = love.filesystem.read(path)
xml2lua.parser(handler):parse(file)
local correction = mat4:getRotateX(-math.pi/2)
local root = handler.root.COLLADA[1]
--get id indices
indices = { }
localToGlobal = { }
indexTree(root)
--load armatures and vertex weights
local armatures = { }
local controllers = { }
if root.library_controllers then
for d,s in ipairs(root.library_controllers[1].controller) do
if s.skin then
local name = s.skin[1]._attr.source:sub(2)
local a = {
weights = { },
joints = { },
jointIDs = { },
}
armatures[name] = a
controllers[s._attr.id] = name
--load sources
local weights = { }
for i,v in ipairs(s.skin[1].source) do
local typ = v.technique_common[1].accessor[1].param[1]._attr.name
if typ == "JOINT" then
a.jointIDs = loadArray(v.Name_array[1][1])
for d,s in ipairs(a.jointIDs) do
a.jointIDs[d] = localToGlobal[s] or s
end
elseif typ == "WEIGHT" then
weights = loadFloatArray(v.float_array[1][1])
end
end
--load weights
local vw = s.skin[1].vertex_weights[1]
local vcount = vw.vcount and loadFloatArray(vw.vcount[1][1]) or { }
local ids = loadFloatArray(vw.v[1][1])
local count = tonumber(vw._attr.count)
local fields = #vw.input
for _,input in ipairs(vw.input) do
local typ = input._attr.semantic
local offset = 1 + tonumber(input._attr.offset)
if typ == "JOINT" then
local ci = 1
for i = 1, count do
local verts = vcount[i] or 1
a.joints[i] = { }
for v = 1, verts do
local id = ids[(ci-1)*fields+offset]
a.joints[i][v] = id+1
ci = ci + 1
end
end
elseif typ == "WEIGHT" then
local ci = 1
for i = 1, count do
local verts = vcount[i] or 1
a.weights[i] = { }
for v = 1, verts do
local id = ids[(ci-1)*fields+offset]
a.weights[i][v] = weights[id+1]
ci = ci + 1
end
end
end
end
--normalize weights and limit to 4 (GPU limit)
for i = 1, #a.weights do
while #a.weights[i] > 4 do
local min, best = math.huge, 1
for d,s in ipairs(a.weights[i]) do
if s < min then
min = s
best = d
end
end
table.remove(a.joints[i], best)
table.remove(a.weights[i], best)
end
--normalize
local sum = 0
for d,s in ipairs(a.weights[i]) do
sum = sum + s
end
if sum > 0 then
for d,s in ipairs(a.weights[i]) do
a.weights[i][d] = s / sum
end
end
end
end
end
end
--load materials
if root.library_materials then
for _,mat in ipairs(root.library_materials[1].material) do
local name = mat._attr.name
local material = self:newMaterial(name)
obj.materials[name] = material
indices[mat._attr.id] = material
--load
if mat.instance_effect then
local effect = indices[mat.instance_effect[1]._attr.url]
--get first profile
local profile
for d,s in pairs(effect) do
profile = s[1]
end
--parse data
if profile then
for step, dataArr in pairs(profile.technique[1]) do
if step ~= "_attr" then
local data = dataArr[1]
if data.emission then
local e = data.emission[1]
if e.color then
local color = loadFloatArray( e.color[1][1] )
material.emission = {color[1] * color[4], color[2] * color[4], color[3] * color[4]}
end
end
if data.diffuse then
local d = data.diffuse[1]
if d.color then
local color = loadFloatArray( d.color[1][1] )
material.color = color
end
end
if data.specular then
local s = data.specular[1]
if s.color then
local color = loadFloatArray( s.color[1][1] )
material.specular = math.sqrt(color[1]^2 + color[2]^2 + color[3]^2)
end
end
if data.shininess then
material.glossiness = tonumber( data.shininess[1].float[1][1] )
end
if data.index_of_refraction then
material.ior = tonumber( data.index_of_refraction[1].float[1][1] )
end
end
end
end
end
end
end
--load main geometry
local meshData = { }
for d,geo in ipairs(root.library_geometries[1].geometry) do
local mesh = geo.mesh[1]
local id = geo._attr.id
meshData[id] = meshData[id] or { }
--translation table
local translate = {
["VERTEX"] = "vertices",
["NORMAL"] = "normals",
["TEXCOORD"] = "texCoords",
["COLOR"] = "colors",
}
--parse vertices
local o
local lastMaterial
local index = 0
local edges = { }
for typ = 1, 3 do
local list
if typ == 1 then
list = mesh.triangles
elseif typ == 2 then
list = mesh.polylist
else
list = mesh.polygons
end
if list then
for _,l in ipairs(list) do
local mat = indices[l._attr.material] or obj.materials.None
local material = self.materialLibrary[mat.name] or mat
if obj.args.splitMaterials then
o = self:newSubObject(geo._attr.id, obj, material)
meshData[id][#meshData[id]+1] = o
index = 0
elseif not o then
o = self:newSubObject(geo._attr.id, obj, material)
meshData[id][#meshData[id]+1] = o
end
--connect with armature
if armatures[o.name] and not o.weights then
o.weights = { }
o.joints = { }
o.jointIDs = armatures[o.name].jointIDs
end
--ids of source components per vertex
local ids
local vcount
if typ == 3 then
ids = { }
vcount = { }
--combine polygons
for _,p in ipairs(l.p) do
local a = loadFloatArray(p[1])
for _,v in ipairs(a) do
ids[#ids+1] = v
end
vcount[#vcount+1] = #a
end
else
ids = loadFloatArray(l.p[1][1])
vcount = l.vcount and loadFloatArray(l.vcount[1][1]) or { }
end
--get max offset
local fields = 0
for d,input in ipairs(l.input) do
fields = tonumber(input._attr.offset) + 1
end
--parse data arrays
local verticeIndex = { }
for d,input in ipairs(l.input) do
local f = translate[input._attr.semantic]
if f then
local s = loadFloatArray( (indices[input._attr.source].input and indices[ indices[input._attr.source].input[1]._attr.source ] or indices[input._attr.source]).float_array[1][1] )
for i = 1, #ids / fields do
local id = ids[(i-1)*fields + tonumber(input._attr.offset) + 1]
if f == "texCoords" then
--xy vector
o[f][index+i] = {
s[id*2+1],
1.0-s[id*2+2],
}
elseif f == "colors" then
--rgba vector
o[f][index+i] = {
s[id*4+1],
s[id*4+2],
s[id*4+3],
s[id*4+4],
}
else
--xyz vectors
o[f][index+i] = {
s[id*3+1],
s[id*3+2],
s[id*3+3]
}
if f == "vertices" then
verticeIndex[index+i] = id
end
--also connect weight and joints
if f == "vertices" and o.weights then
o.weights[index+i] = armatures[o.name].weights[id+1]
o.joints[index+i] = armatures[o.name].joints[id+1]
o.materials[index+i] = material
end
end
end
end
end
--parse polygons
local count = l._attr.count
local i = index+1
for face = 1, count do
local verts = vcount[face] or 3
--store edges
for v = 1, verts do
local a, b = i + v - 1, v == verts and i or (i + v)
local min = math.min(verticeIndex[a], verticeIndex[b])
local max = math.max(verticeIndex[a], verticeIndex[b])
local id = min * 65536 + max
if not edges[id] then
edges[id] = true
o.edges[#o.edges+1] = {a, b}
end
end
if verts == 3 then
--tris
o.faces[#o.faces+1] = {i, i+1, i+2}
else
--triangulates, fan style
for f = 1, verts-2 do
o.faces[#o.faces+1] = {i, i+f, i+f+1}
end
end
i = i + verts
end
index = #o.vertices
end
end
end
end
--load light
local lightIDs = { }
if root.library_lights then
for d,light in ipairs(root.library_lights[1].light) do
local l = self:newLight()
lightIDs[light._attr.id] = l
if light.extra and light.extra[1] and light.extra[1].technique and light.extra[1].technique[1] then
local dat = light.extra[1].technique[1]
l:setColor(dat.red and tonumber(dat.red[1][1]) or 1.0, dat.green and tonumber(dat.green[1][1]) or 1.0, dat.blue and tonumber(dat.blue[1][1]) or 1.0)
l:setBrightness(dat.energy and tonumber(dat.energy[1][1]) or 1.0)
end
table.insert(obj.lights, l)
end
end
local function addObject(name, mesh, transform)
for _,subObject in ipairs(meshData[mesh]) do
local id = name
if obj.args.splitMaterials then
id = id .. "_" .. subObject.material.name
end
obj.objects[id] = subObject:clone()
obj.objects[id].name = name
obj.objects[id].transform = correction * transform
end
end
--load scene
for d,s in ipairs(root.library_visual_scenes[1].visual_scene[1].node) do
obj.joints = { }
if s.instance_geometry then
--object
local id = s.instance_geometry[1]._attr.url:sub(2)
local name = s._attr.name or s._attr.id
local transform = mat4(loadFloatArray(s.matrix[1][1]))
addObject(name, id, transform)
elseif s.instance_light then
local transform = correction * mat4(loadFloatArray(s.matrix[1][1]))
local l = lightIDs[s.instance_light[1]._attr.url:sub(2)]
l:setPosition(transform[4], transform[8], transform[12])
elseif s._attr.name == "Armature" then
			--probably an armature
--TODO: not a proper way to identify armature nodes
local function skeletonLoader(nodes, parentTransform)
local skel = { }
for d,s in ipairs(nodes) do
if s.instance_controller then
--object associated with skeleton
local id = s.instance_controller[1]._attr.url:sub(2)
local mesh = controllers[id]
local name = s._attr.name or s._attr.id
local transform = mat4(loadFloatArray(s.matrix[1][1]))
addObject(name, mesh, transform)
end
if s._attr.type == "JOINT" then
local name = s._attr.id
local m = mat4(loadFloatArray(s.matrix[1][1]))
local bindTransform = parentTransform and parentTransform * m or m
skel[name] = {
name = name,
bindTransform = m,
inverseBindTransform = bindTransform:invert(),
}
obj.joints[name] = skel[name]
if s.node then
skel[name].children = skeletonLoader(s.node, bindTransform)
end
end
end
return skel
end
obj.skeleton = skeletonLoader(s.node)
break
end
end
--load animations
if root.library_animations then
local animations = { }
local function loadAnimation(anim)
for _,a in ipairs(anim) do
if a.animation then
loadAnimation(a.animation)
else
local keyframes = { }
local name = a.channel[1]._attr.target:sub(1, -11)
--parse sources
local sources = { }
for d,s in ipairs(a.source) do
sources[s._attr.id] = s.float_array and loadFloatArray(s.float_array[1][1]) or s.Name_array and loadArray(s.Name_array[1][1])
end
for d,s in ipairs(a.sampler[1].input) do
sources[s._attr.semantic] = sources[s._attr.source:sub(2)]
end
--get matrices
local frames = { }
local positions = { }
for i = 1, #sources.OUTPUT / 16 do
local m = mat4(unpack(sources.OUTPUT, i*16-15, i*16))
frames[#frames+1] = {
time = sources.INPUT[i],
--interpolation = sources.INTERPOLATION[i],
rotation = quat.fromMatrix(m:subm()),
position = vec3(m[4], m[8], m[12]),
}
end
--pack
animations[name] = frames
end
end
end
loadAnimation(root.library_animations[1].animation)
--split animations
if obj.args.animations then
obj.animations = { }
obj.animationLengths = { }
for anim, time in pairs(obj.args.animations) do
obj.animations[anim] = { }
obj.animationLengths[anim] = time[2] - time[1]
for joint, frames in pairs(animations) do
local newFrames = { }
for i, frame in ipairs(frames) do
if frame.time >= time[1] and frame.time <= time[2] then
table.insert(newFrames, frame)
end
end
obj.animations[anim][joint] = newFrames
end
end
else
obj.animations = {
default = animations,
}
obj.animationLengths = {
default = animations[#animations].time,
}
end
end
end | 26.573643 | 184 | 0.589192 | 3.296875 |
f024f2d1468cd63a89d1e5336dc2508a4542b04f | 1,476 | py | Python | Stack/10-stack-special-design-and-implement.py | mahmutcankurt/DataStructures_Python | bfb81e3530b535c4e48c07548dc4a4f9a648bab2 | [
"MIT"
] | 1 | 2022-01-25T22:17:55.000Z | 2022-01-25T22:17:55.000Z | Stack/10-stack-special-design-and-implement.py | mahmutcankurt/DataStructures_Python | bfb81e3530b535c4e48c07548dc4a4f9a648bab2 | [
"MIT"
] | null | null | null | Stack/10-stack-special-design-and-implement.py | mahmutcankurt/DataStructures_Python | bfb81e3530b535c4e48c07548dc4a4f9a648bab2 | [
"MIT"
] | null | null | null | class Stack:
def __init__(self):
self.array = []
self.top = -1
self.max = 100
def isEmpty(self):
if(self.top == -1):
return True
else:
return False
def isFull(self):
if(self.top == self.max -1):
return True
else:
return False
def push(self, data):
if(self.isFull()):
print("Stack Overflow")
return
else:
self.top += 1
self.array.append(data)
def pop(self):
if(self.isEmpty()):
print("Stack Underflow")
return
else:
self.top -= 1
return(self.array.pop())
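# SpecialStack (below) augments Stack with an auxiliary stack self.Min whose top
# always holds the current minimum, so getMin() runs in O(1): push() stores the
# smaller of the new value and the previous minimum, and pop() discards from both
# stacks to keep them in step.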
class SpecialStack(Stack):
def __init__(self):
super().__init__()
self.Min = Stack()
def push(self, x):
        if(self.isEmpty()):
super().push(x)
self.Min.push(x)
else:
super().push(x)
y = self.Min.pop()
self.Min.push(y)
if(x <= y):
self.Min.push(x)
else:
self.Min.push(y)
def pop(self):
x = super().pop()
self.Min.pop()
return x
def getMin(self):
x = self.Min.pop()
self.Min.push(x)
return x
if __name__ == "__main__":
s = SpecialStack()
s.push(10)
s.push(20)
s.push(30)
print(s.getMin())
s.push(5)
print(s.getMin()) | 20.219178 | 36 | 0.443767 | 3.5 |
2069a8783ef5257f23ae89a2c54877facee8a7e6 | 1,961 | lua | Lua | lua/twilight/colors/init.lua | jzone1366/twilight.nvim | da72643da7b73745ce5b56dd79340446949acf7f | [
"MIT"
] | 1 | 2022-03-14T23:15:29.000Z | 2022-03-14T23:15:29.000Z | lua/twilight/colors/init.lua | jzone1366/twilight.nvim | da72643da7b73745ce5b56dd79340446949acf7f | [
"MIT"
] | 1 | 2022-03-15T08:23:39.000Z | 2022-03-15T14:31:22.000Z | lua/twilight/colors/init.lua | jzone1366/twilight.nvim | da72643da7b73745ce5b56dd79340446949acf7f | [
"MIT"
] | null | null | null | local M = {}
M.styles = {
"light",
"dark",
}
-- Adds subtle and harsh colors depending on whether the colors are dark or light
-- @param colors table
-- @return table of colors
local function construct(colors)
colors.harsh = colors.meta.light and colors.black or colors.white
colors.subtle = colors.meta.light and colors.white or colors.black
return colors
end
-- Returns a color table based on the name provided
-- This returns the initial colors defined by the colorscheme
-- without overrides from the configuration
-- If name is not found it will default to light
-- If the style is invalid it will return light colors
-- @param name string (optional)
-- @return table of colors
function M.init(name)
name = name or require("twilight.config").options.style
if name == "random" then
local index = math.random(#M.styles)
return construct(require("twilight.colors." .. M.styles[index]).init())
end
for _, style in ipairs(M.styles) do
if style == name then
return construct(require("twilight.colors." .. name).init())
end
end
require("twilight.util").warn("colorscheme " .. name .. " was not found")
return construct(require("twilight.colors.light").init())
end
-- Return color table based on the name provided
-- If no name is provided it will return the style set in the config
-- If the style defined in the configuration is invalid it will return light colors
-- @param name string (optional)
-- @return table of colors
function M.load(name)
name = name or require("twilight.config").options.style
if name == "random" then
local index = math.random(#M.styles)
return construct(require("twilight.colors." .. M.styles[index]).load())
end
for _, style in ipairs(M.styles) do
if style == name then
return construct(require("twilight.colors." .. name).load())
end
end
require("twilight.util").warn("colorscheme " .. name .. " was not found")
return construct(require("twilight.colors.light").load())
end
return M
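-- Minimal usage sketch (assumes this file is reachable as the "twilight.colors"
-- module and that twilight.config has been set up):
--   local colors = require("twilight.colors")
--   local palette = colors.load("dark")  -- unknown names warn and fall back to light
--   print(palette.harsh, palette.subtle)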
| 29.712121 | 87 | 0.720551 | 3.421875 |
43c9ae59a393ebfbeb768d3575fb36a4c0db3588 | 1,578 | go | Go | storage/s3_func.go | OgreCase/kit | bebf00292e30262a2fc33b0c544e3e3de27194de | [
"Apache-2.0"
] | null | null | null | storage/s3_func.go | OgreCase/kit | bebf00292e30262a2fc33b0c544e3e3de27194de | [
"Apache-2.0"
] | null | null | null | storage/s3_func.go | OgreCase/kit | bebf00292e30262a2fc33b0c544e3e3de27194de | [
"Apache-2.0"
] | 1 | 2022-01-10T09:13:38.000Z | 2022-01-10T09:13:38.000Z | package storage
import (
"bytes"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/globalsign/mgo/bson"
"mime/multipart"
"net/http"
"path/filepath"
)
func UploadFileToS3(s *session.Session, fileHeader *multipart.FileHeader) (string, error) {
// get the file size and read
// the file content into a buffer
size := fileHeader.Size
buffer := make([]byte, size)
file, err := fileHeader.Open()
if err != nil {
return "", err
}
file.Read(buffer)
// create a unique file name for the file
	// The file name here becomes the object key in the AWS bucket and part of the file URL;
	// the full URL can be built by prepending the domain to this key.
tempFileName := "pictures/" + bson.NewObjectId().Hex() + filepath.Ext(fileHeader.Filename)
// config settings: this is where you choose the bucket,
// filename, content-type and storage class of the file
// you're uploading
_, err = s3.New(s).PutObject(&s3.PutObjectInput{
		Bucket:               aws.String("test-bucket"), // bucket name; replace this with the bucket you created
Key: aws.String(tempFileName),
		ACL:                  aws.String("public-read"), // could be private if you want it to be accessed by only authorized users
Body: bytes.NewReader(buffer),
ContentLength: aws.Int64(int64(size)),
ContentType: aws.String(http.DetectContentType(buffer)),
ContentDisposition: aws.String("attachment"),
ServerSideEncryption: aws.String("AES256"),
StorageClass: aws.String("INTELLIGENT_TIERING"),
})
if err != nil {
return "", err
}
return tempFileName, err
}
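// Caller sketch (illustrative; assumes AWS credentials/region come from the SDK's
// default chain and that the bucket referenced above exists):
//
//	sess, err := session.NewSession(&aws.Config{Region: aws.String("eu-west-1")})
//	if err != nil {
//		// handle the error
//	}
//	key, err := UploadFileToS3(sess, fileHeader) // fileHeader is a *multipart.FileHeader from an upload form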
| 32.204082 | 123 | 0.680608 | 3.140625 |
0cdcd31b1d541c0b2fc7fa87f9fe6a1fb877291b | 4,997 | py | Python | rdsslib/kinesis/client.py | JiscSD/rdss-shared-libraries | cf07cad3f176ef8be1410fc29b240fb4791e607a | [
"Apache-2.0"
] | null | null | null | rdsslib/kinesis/client.py | JiscSD/rdss-shared-libraries | cf07cad3f176ef8be1410fc29b240fb4791e607a | [
"Apache-2.0"
] | 4 | 2018-02-15T12:32:26.000Z | 2018-03-06T16:33:34.000Z | rdsslib/kinesis/client.py | JiscSD/rdss-shared-libraries | cf07cad3f176ef8be1410fc29b240fb4791e607a | [
"Apache-2.0"
] | 1 | 2018-03-13T19:38:54.000Z | 2018-03-13T19:38:54.000Z | import json
import logging
from .errors import MaxRetriesExceededException, DecoratorApplyException
MAX_ATTEMPTS = 6
class KinesisClient(object):
def __init__(self, writer, reader):
"""
Writes and reads messages to and from Kinesis streams
:param writer: handles writing of payloads to Kinesis stream
:param reader: handles reading of payloads from Kinesis stream
:type writer: writer.StreamWriter
:type reader: reader.StreamReader
"""
self.logger = logging.getLogger(__name__)
self.logger.setLevel(logging.INFO)
self.writer = writer
self.reader = reader
def write_message(self, stream_names, payload, max_attempts=MAX_ATTEMPTS):
"""Write a payload into each stream in stream_names
:param stream_names: Kinesis streams to write to
:param payload: JSON payload
:param max_attempts: maximum number of times to attempt writing
:type stream_names: list of str
:type payload: str
"""
for stream_name in stream_names:
self.writer.put_stream(stream_name, payload, max_attempts)
def read_messages(self, stream_name, seq_number=None):
"""Continuous loop that reads messages from stream_name
:param stream_name: Name of Kinesis stream to read from
:param seq_number: Optional seq number
:type stream_name: str
:return message_gen: Yields messages read from Kinesis stream
:rtype message_gen: generator
"""
message_gen = self.reader.read_stream(
stream_name, seq_number=seq_number)
return message_gen
class EnhancedKinesisClient(KinesisClient):
def __init__(self, writer, reader, error_handler, decorators=None):
"""
Writes and reads messages to and from Kinesis streams with
error handling and message decoration
:param writer: Writes messages to Kinesis stream
:param reader: Reads messages from Kinesis stream
:param error_handler: Handles messages with errors
:param decorators: Enhance messages with extra fields
:type writer: writer.StreamWriter
:type reader: reader.StreamReader
:type error_handler: handlers.MessageErrorHandler
:type decorators: list
"""
super().__init__(writer, reader)
if decorators:
self.decorators = decorators
else:
self.decorators = []
self.error_handler = error_handler
def _apply_decorators(self, payload):
"""
Applies a sequence of decorators that
enhance and modify the contents of a payload
:param payload: Undecorated JSON payload
:type payload: str
:return payload: Decorated JSON payload
:rtype payload: str
"""
decorated_payload = payload
for decorator in self.decorators:
try:
decorated_payload = decorator.process(payload)
except Exception:
self.logger.warning(
'Failed to apply decorator {}'.format(decorator.name))
raise DecoratorApplyException()
return decorated_payload
def write_message(self, stream_names, payload, max_attempts=MAX_ATTEMPTS):
"""Write a payload into each stream in stream_names
:param stream_names: Kinesis streams to write to
:param payload: JSON payload
:param max_attempts: Max number of times to attempt writing
:type stream_names: list of str
:type payload: str
:type max_attempts: int
"""
try:
json.loads(payload)
except json.decoder.JSONDecodeError:
self.error_handler.handle_invalid_json(payload)
return
decorated_payload = self._apply_decorators(payload)
for stream_name in stream_names:
try:
super().write_message([stream_name],
decorated_payload,
max_attempts)
except MaxRetriesExceededException as e:
stream_name = e.args[0]
error_code = 'GENERR005'
error_description = 'Maximum retry attempts {0} exceed'\
'for stream {1}'.format(max_attempts,
stream_name)
self.error_handler.handle_error(decorated_payload,
error_code,
error_description)
def handle_error(self, payload, error_code, error_description):
""" Allows errors to be posted to the stream occurring from
activities like payload validation
:param payload: JSON payload
:param error_code: Error Code
:param error_description: Description Of Error
"""
self.error_handler.handle_error(payload, error_code, error_description)
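# Example wiring sketch (hypothetical variable names; the concrete StreamWriter,
# StreamReader and MessageErrorHandler implementations come from the sibling
# writer/reader/handlers modules):
#
#   client = EnhancedKinesisClient(writer, reader, error_handler)
#   client.write_message(['input-stream'], json.dumps(payload))
#   for message in client.read_messages('input-stream'):
#       handle(message)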
| 39.346457 | 79 | 0.626976 | 3.34375 |
70b4a560218bd2b4ae7350c0aabd5d5072a724e9 | 2,089 | go | Go | pkg/util/file/volatile_file/volatile_file.go | dizzy57/flow | cc1282eb8a54943686115a95468101835cdce481 | [
"MIT"
] | null | null | null | pkg/util/file/volatile_file/volatile_file.go | dizzy57/flow | cc1282eb8a54943686115a95468101835cdce481 | [
"MIT"
] | null | null | null | pkg/util/file/volatile_file/volatile_file.go | dizzy57/flow | cc1282eb8a54943686115a95468101835cdce481 | [
"MIT"
] | null | null | null | package file
import (
"fmt"
"io/ioutil"
"sync"
log "github.com/sirupsen/logrus"
event "github.com/awesome-flow/flow/pkg/util/file/event"
"github.com/fsnotify/fsnotify"
)
const (
VFPermDefault = 0644
)
type VolatileFile struct {
path string
once *sync.Once
watcher *fsnotify.Watcher
notify chan *event.Event
}
func New(path string) (*VolatileFile, error) {
w, err := fsnotify.NewWatcher()
if err != nil {
return nil, err
}
vf := &VolatileFile{
path: path,
once: &sync.Once{},
watcher: w,
notify: make(chan *event.Event),
}
return vf, nil
}
func (vf *VolatileFile) Deploy() error {
log.Infof("Deploying a watcher for path: %s", vf.path)
vf.once.Do(func() {
go func() {
for ntf := range vf.watcher.Events {
log.Infof("Received a new fsnotify notification: %s", ntf)
switch ntf.Op {
case fsnotify.Create:
vf.notify <- event.New(event.Create)
case fsnotify.Write:
vf.notify <- event.New(event.Update)
case fsnotify.Remove:
vf.notify <- event.New(event.Delete)
default:
log.Infof("Ignored event: %s", ntf.String())
}
}
}()
vf.watcher.Add(vf.path)
})
return nil
}
func (vf *VolatileFile) TearDown() error {
log.Infof("Removing the watcher for path: %s", vf.path)
return vf.watcher.Remove(vf.path)
}
func (vf *VolatileFile) ReadRawData() ([]byte, error) {
rawData, err := ioutil.ReadFile(vf.path)
if err != nil {
return nil, err
}
return rawData, nil
}
func (vf *VolatileFile) ReadData() (interface{}, error) {
return vf.ReadRawData()
}
func (vf *VolatileFile) WriteData(data interface{}) error {
rawData, err := vf.EncodeData(data)
if err != nil {
return err
}
return ioutil.WriteFile(vf.path, rawData, VFPermDefault)
}
func (vf *VolatileFile) GetPath() string {
return vf.path
}
func (vf *VolatileFile) GetNotifyChan() chan *event.Event {
return vf.notify
}
func (vf *VolatileFile) EncodeData(data interface{}) ([]byte, error) {
if byteData, ok := data.([]byte); ok {
return byteData, nil
}
return nil, fmt.Errorf("Failed to convert data to []byte")
}
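// Usage sketch (path and logging are illustrative only):
//
//	vf, err := New("/etc/flow/pipeline.yml")
//	if err == nil && vf.Deploy() == nil {
//		go func() {
//			for ev := range vf.GetNotifyChan() {
//				log.Infof("file event: %v", ev)
//			}
//		}()
//	}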
| 20.281553 | 70 | 0.662518 | 3.03125 |
14992220885c7a8d417972337b8a383c2ae2eb5f | 2,756 | lua | Lua | Aetheri/aetheri/species/appearance.lua | cuteBoiButt/sb.StardustSuite | 3c442c94192df257f46e08afc9f3ff8b5a6f2016 | [
"MIT"
] | 30 | 2016-09-17T21:28:00.000Z | 2022-03-31T04:59:51.000Z | Aetheri/aetheri/species/appearance.lua | cuteBoiButt/sb.StardustSuite | 3c442c94192df257f46e08afc9f3ff8b5a6f2016 | [
"MIT"
] | 22 | 2016-10-16T01:37:24.000Z | 2021-11-29T20:47:52.000Z | Aetheri/aetheri/species/appearance.lua | cuteBoiButt/sb.StardustSuite | 3c442c94192df257f46e08afc9f3ff8b5a6f2016 | [
"MIT"
] | 14 | 2016-12-17T18:59:03.000Z | 2022-03-03T00:58:22.000Z | -- handles all appearance and animation apart from the HUD
require "/lib/stardust/color.lua"
appearance = {
baseDirectives = "",
}
local bodyReplacePalette = {
"dafafafa", "caeaeafa", "badadafa", "aacacafa"
}
local function generatePalette(tbl)
local hue = tbl[1]
local sat = tbl[2]
local lumBright = tbl[3]
local lumDark = tbl[4]
return {
color.toHex(color.fromHsl{ hue, sat, lumBright }),
color.toHex(color.fromHsl{ hue, sat, util.lerp(1/3, lumBright, lumDark) }),
color.toHex(color.fromHsl{ hue, sat, util.lerp(2/3, lumBright, lumDark) }),
color.toHex(color.fromHsl{ hue, sat, lumDark })
}
end
local directives = ""
local updateGlow
function appearance.updateColors()
appearance.settings = status.statusProperty("aetheri:appearance", { })
local a = appearance.settings
if not a.coreHsl then
local name = world.entityName(entity.id())
a.coreHsl = { -- start with a randomized core color based on your name!
sb.staticRandomDoubleRange(0.0, 1.0, name, "core hue"), -- random hue
1.0 - sb.staticRandomDoubleRange(0.0, 1.0, name, "core saturation")^2, -- biased toward saturated
math.min(1, sb.staticRandomI32Range(0, 4, name, "bright or dark?")), -- 1 in 5 chance to start dark
sb.staticRandomDoubleRange(0.3, 0.7, name, "border brightness")
}
--playerext.message("generated values: " .. util.tableToString(a.coreHsl))
end
a.palette = generatePalette(a.coreHsl)
a.glowColor = color.fromHsl {
a.coreHsl[1],
a.coreHsl[2],
0.5 + (((a.coreHsl[3] + a.coreHsl[4]) / 2) - 0.5) * 0.5 -- average luma, pushed towards 0.5 (full vivid)
}
status.setStatusProperty("aetheri:appearance", a)
local d = {
"?replace;663b14fe=00000000;8d581cfe=00000000;c88b28fe=00000000;e7c474fe=00000000;404040fe=00000000;808080fe=00000000;6d0103fe=00000000;02da37fe=00000000;5786fffe=00000000",
color.replaceDirective(bodyReplacePalette, a.palette, true),
}
appearance.baseDirectives = table.concat(d)
tech.setParentDirectives(appearance.baseDirectives)
playerext.setGlowColor(color.lightColor(a.glowColor, 0.8))
world.sendEntityMessage(entity.id(), "aetheri:paletteChanged")
world.sendEntityMessage(entity.id(), "startech:refreshEnergyColor")
updateGlow = true
end
function appearance.update(p)
if updateGlow then
updateGlow = false
local a = appearance.settings
playerext.setGlowColor(color.lightColor(a.glowColor, 0.8))
end
end
-- register these here since this is executed during techstub init
message.setHandler("aetheri:refreshAppearance", appearance.updateColors)
message.setHandler("startech:getEnergyColor", function()
local p = appearance.settings.palette
return { p[1], p[3], p[4] } -- somewhat cut down palette
end)
| 35.333333 | 177 | 0.714078 | 3.125 |
74d046024ccef2a8077c21d99f32704efcf988c9 | 6,818 | js | JavaScript | myqq-webapp/src/views/Login/index.js | WHUT-XGP/chat-webapp | 53eba4e223411ca5b988857c56a38cc962d3c27e | [
"MIT"
] | 23 | 2020-12-25T08:39:11.000Z | 2022-03-23T07:12:23.000Z | myqq-webapp/src/views/Login/index.js | WHUT-XGP/chat-webapp | 53eba4e223411ca5b988857c56a38cc962d3c27e | [
"MIT"
] | 1 | 2022-01-02T14:31:28.000Z | 2022-01-02T14:31:28.000Z | myqq-webapp/src/views/Login/index.js | WHUT-XGP/chat-webapp | 53eba4e223411ca5b988857c56a38cc962d3c27e | [
"MIT"
] | null | null | null | import React, { useState, useEffect, useCallback } from 'react'
import { connect } from 'react-redux'
// 导入store相关
import { actionCreator } from './store'
// 导入CSS
import { LoginStyle } from './style'
// 导入组件
import Icon from '../../components/context/Icon'
import LoginInput from '../../components/common/LoginInput'
import Dialog from '../../components/common/Dialog'
import Loading from '../../components/common/loading'
import Toast from '../../components/common/Toast'
import { getInfo } from '../../api/LoginRequest'
function Login(props) {
// 登录用户名和密码
const { loading, error, history, register, token } = props
const { getLogin, changeToken, changeLoading, changeIsError, registerUser, changeRegister } = props;
const [password, setPassword] = useState('');
const [username, setUsername] = useState('');
const [isLoginStatus, setIsLoginStatus] = useState(true);
const [confirmPassword, setConfirmPassword] = useState('');
const [toast, setToast] = useState(false);
const [content, setContent] = useState('');
// 设置错误提示事件
// 通过useCallback改写
const changeToast = useCallback((content) => {
setContent(content)
setToast(true)
// 两秒后消失
setTimeout(() => {
setToast(false)
}, 2000);
}, [setToast, setContent])
// 从本地获取token
useEffect(() => {
const localToken = localStorage.getItem('token');
if (localToken) {
changeToken(localToken);
}
}, [changeToken])
// 登录成功的逻辑处理
useEffect(() => {
if (token) {
// 存进本地
getInfo('', token).then(() => {
localStorage.setItem('token', token)
history.push('/home/message')
}).catch((err) => {
console.log(err)
})
}
}, [token, history])
// 中途出错的逻辑处理
useEffect(() => {
if (error) {
changeToast(isLoginStatus ? '密码或用户名错误' : '用户名已存在')
// 重置
changeIsError(false)
}
}, [error, changeIsError, isLoginStatus,changeToast])
// 注册成功
useEffect(() => {
if (register) {
changeToast('恭喜你! 注册成功!')
changeRegister(false);
setTimeout(() => {
setIsLoginStatus(true);
}, 500);
}
}, [register, changeRegister,changeToast])
return (
<LoginStyle>
{/**标志 */}
<div className="icon-box">
<a href="/"><Icon xlinkHref='#icon-crew_react-copy'></Icon></a>
<span>MyQQ</span>
</div>
{/**登录输入框 */}
{
isLoginStatus && (<div className="input-box">
<LoginInput xlinkHref='#icon-morentouxiang' type="text" value={username} handleInput={(e) => {
setUsername(e)
}} placeHolder="请输入用户名" />
<LoginInput xlinkHref='#icon-mima' type="password" value={password} placeHolder="请输入密码" handleInput={(e) => {
setPassword(e)
}} />
</div>)
}
{/**注册输入框 */}
{
!isLoginStatus && (<div className="input-box">
<LoginInput xlinkHref='#icon-morentouxiang' type="text" value={username} handleInput={(e) => {
setUsername(e)
}} placeHolder="请输入用户名" />
<LoginInput xlinkHref='#icon-mima' type="password" value={password} placeHolder="请输入密码" handleInput={(e) => {
setPassword(e)
}} />
<LoginInput xlinkHref={confirmPassword === "" ? "#icon-crew_react" : confirmPassword === password ? '#icon-querenmima' : '#icon-cuowu'} type="password" value={confirmPassword} placeHolder="确认密码" handleInput={(e) => {
setConfirmPassword(e)
}} />
</div>)
}
{/**控制按钮 */}
<div className='button-go' style={{ animation: loading ? "circle 1s linear infinite" : "" }} onClick={() => {
if (isLoginStatus) {
// 登录 通过redux获取数据
if (username && password) {
getLogin(username, password)
changeLoading(true)
} else {
changeToast('信息不足,请完成填写')
}
} else {
// 注册
if (username && password && password === confirmPassword) {
registerUser(username, password)
changeLoading(true);
} else {
changeToast('请完成填写')
}
}
}} >
<Icon xlinkHref='#icon-denglu' size="1.3rem" />
</div>
{/**切换按钮 */}
<span style={{ marginTop: '1rem', fontSize: "0.8rem", textDecoration: 'underline', color: '#3F91CF' }} onClick={() => {
setIsLoginStatus(!isLoginStatus)
}}
>{isLoginStatus ? '点我注册' : '切换登录'}</span>
{/**加载提示组件 */}
<Dialog open={props.loading} title='加载中...' >
<Loading />
</Dialog>
{/** 轻提示组件*/}
<Toast open={toast} content={content}></Toast>
</LoginStyle>
)
}
// 配置redux映射关系
const mapStateToProps = (state) => {
return {
token: state.LoginReducer.token,
userInfo: state.LoginReducer.userInfo,
loading: state.LoginReducer.loading,
isLogin: state.LoginReducer.isLogin,
error: state.LoginReducer.isError,
register: state.LoginReducer.isRegister
}
}
const mapDispatchToProps = (dispatch) => {
return {
getLogin: (username, password) => {
dispatch(actionCreator.getLogin(username, password))
},
getInfo: (username) => {
dispatch(actionCreator.getUserInfo(username))
},
changeToken: (token) => {
dispatch(actionCreator.tokenChange(token))
},
changeLoading: (status) => {
dispatch(actionCreator.changeLoadingStatus(status))
},
changeIsLogin: (status) => {
dispatch(actionCreator.changeIsLoginStatus(status))
},
changeIsError: (status) => {
dispatch(actionCreator.changeErrorStatus(status))
},
registerUser: (username, password) => {
dispatch(actionCreator.getRegister(username, password))
},
changeRegister: (status) => {
dispatch(actionCreator.changeRegisterStatus(status))
}
}
}
export default connect(mapStateToProps, mapDispatchToProps)(React.memo(Login)) | 35.510417 | 236 | 0.512174 | 3.078125 |
0be4094ec9c88b491ea00f03e9587e97033d9ed4 | 5,404 | js | JavaScript | src/app/main/registros/Registros.js | lucianoarmoa98/indufar_prospeccion_medica-campos-recetas-main | bb075c325597be524f58a74f5a8ae6a9ba59291c | [
"MIT"
] | null | null | null | src/app/main/registros/Registros.js | lucianoarmoa98/indufar_prospeccion_medica-campos-recetas-main | bb075c325597be524f58a74f5a8ae6a9ba59291c | [
"MIT"
] | null | null | null | src/app/main/registros/Registros.js | lucianoarmoa98/indufar_prospeccion_medica-campos-recetas-main | bb075c325597be524f58a74f5a8ae6a9ba59291c | [
"MIT"
] | null | null | null | // React y Redux.
import React from 'react';
import {connect} from 'react-redux';
import {bindActionCreators} from 'redux';
import * as registrosActions from './store/actions';
// Material UI.
import MaterialTable, {MTableToolbar} from 'material-table-hotfix-initial-page-remote-data';
// Otros.
import {FusePageSimple} from '@fuse';
import {configuracionDeTabla} from './RegistrosConfig';
import {
construirParametrosDePaginacion,
END_POINT_REGISTROS,
languageConfig
} from '../UIUtils';
import {withRouter} from 'react-router-dom';
import {Paper} from '@material-ui/core';
import Button from '@material-ui/core/Button';
import BackupIcon from '@material-ui/icons/Backup';
import Footer from '../../../components/Form/Footer';
class Registros extends React.Component {
_isMounted = false;
constructor(props) {
super(props);
this.state = {
selectedFile: null
};
this.tableRef = React.createRef();
this.onChangeHandler = this.onChangeHandler.bind(this);
}
componentDidMount() {
this._isMounted = true;
}
componentWillUnmount() {
this._isMounted = false;
}
onChangeHandler(event) {
this.props.formEditSubmit(event.target.files[0]);
}
render() {
const {
registros: {list, formEdit},
listChangePage,
listChangeQuantityPerPage,
setSearchValue
} = this.props;
const {isSubmiting, success, error} = formEdit;
return (
<FusePageSimple
content={
<div className="p-24">
<MaterialTable
title={configuracionDeTabla.titulo}
columns={configuracionDeTabla.columnas}
tableRef={this.tableRef}
data={query => (
new Promise(resolve => {
let url = construirParametrosDePaginacion(query, END_POINT_REGISTROS);
fetch(url)
.then(response => response.json())
.then(result => {
if (!this._isMounted) {
return;
}
if (setSearchValue) {
setSearchValue(query.search);
}
resolve({
data: result.data,
page: result.paginaActual,
totalCount: result.totalRegistros
});
});
})
)}
components={{
Container: props => <Paper {...props} elevation={0}/>,
Toolbar: props => (
<div>
<MTableToolbar {...props} />
{/*<div style={{*/}
{/* display: 'flex',*/}
{/* flexDirection: 'row-reverse',*/}
{/* height: 56*/}
{/*}}>*/}
{/* <input*/}
{/* id="contained-button-file"*/}
{/* type="file"*/}
{/* multiple*/}
{/* name="file"*/}
{/* onChange={this.onChangeHandler}*/}
{/* style={{display: 'none'}}/>*/}
{/* <label htmlFor="contained-button-file">*/}
{/* <Button*/}
{/* component="span"*/}
{/* size='small'*/}
{/* variant="contained"*/}
{/* disableElevation*/}
{/* style={{*/}
{/* alignSelf: 'center',*/}
{/* marginRight: 16*/}
{/* }}*/}
{/* color='secondary'*/}
{/* startIcon={<BackupIcon />}>*/}
{/* Importar Excel*/}
{/* </Button>*/}
{/* </label>*/}
{/* <div style={{width: 400, marginLeft: 16, marginRight: 16}}>*/}
{/* <Footer*/}
{/* submitting={isSubmiting}*/}
{/* error={error}*/}
{/* success={success}/>*/}
{/* </div>*/}
{/*</div>*/}
</div>
)
}}
onChangePage={listChangePage}
onChangeRowsPerPage={listChangeQuantityPerPage}
localization={languageConfig}
options={{
pageSize: list.pageSize,
pageSizeOptions: list.pageSizeOptions,
initialPage: list.page,
searchText: list.searchText,
padding: 'dense',
actionsColumnIndex: -1,
debounceInterval: 900
}}/>
</div>
}/>
);
}
}
function mapStateToProps({registros}) {
return {registros};
}
function mapDispatchToProps(dispatch) {
return bindActionCreators(
{
listChangePage: registrosActions.listChangePage,
listChangeQuantityPerPage: registrosActions.listChangeQuantityPerPage,
setSearchValue: registrosActions.setSearchValue,
changeFilterValue: registrosActions.changeFilterValue,
formEditSubmit: registrosActions.formEditSubmit
},
dispatch
);
}
export default withRouter(connect(mapStateToProps, mapDispatchToProps)(Registros));
| 33.358025 | 92 | 0.477979 | 3 |
f51ea74045d5aa8e7f16bc66c89fef08f2dc1661 | 677 | lua | Lua | plugin/src/FormatScript/Vendor/Llama/List/zip.lua | howmanysmall/StyluaPlugin | a5f10432a82f68f2d746723007638e6240f759e9 | [
"MIT"
] | null | null | null | plugin/src/FormatScript/Vendor/Llama/List/zip.lua | howmanysmall/StyluaPlugin | a5f10432a82f68f2d746723007638e6240f759e9 | [
"MIT"
] | null | null | null | plugin/src/FormatScript/Vendor/Llama/List/zip.lua | howmanysmall/StyluaPlugin | a5f10432a82f68f2d746723007638e6240f759e9 | [
"MIT"
] | null | null | null | local Debug = require(script.Parent.Parent.Parent.Debug)
local Typer = require(script.Parent.Parent.Parent.Typer)
local Debug_Assert = Debug.Assert
local Typer_Array = Typer.Array
local function zip(...)
local new = {}
local argCount = select("#", ...)
if argCount <= 0 then
return new
end
local firstList = Debug_Assert(Typer_Array(select(1, ...)))
local minLen = #firstList
for i = 2, argCount do
local list = Debug_Assert(Typer_Array(select(i, ...)))
local len = #list
if len < minLen then
minLen = len
end
end
for i = 1, minLen do
new[i] = {}
for j = 1, argCount do
new[i][j] = select(j, ...)[i]
end
end
return new
end
return zip
| 17.358974 | 60 | 0.661743 | 3.09375 |
e8e12c70a26b28e73712420fd03691434cb4267c | 13,354 | py | Python | adversarial-transfer-nlp/CW_attack.py | AI-secure/Uncovering-the-Connections-BetweenAdversarial-Transferability-and-Knowledge-Transferability | a2fb10f56618c6d6dd1638967d59c4a83ffa1c05 | [
"CC0-1.0"
] | 8 | 2021-06-18T10:32:27.000Z | 2022-01-16T06:46:25.000Z | adversarial-transfer-nlp/CW_attack.py | AI-secure/Does-Adversairal-Transferability-Indicate-Knowledge-Transferability | a2fb10f56618c6d6dd1638967d59c4a83ffa1c05 | [
"CC0-1.0"
] | 2 | 2021-08-25T15:14:12.000Z | 2022-02-09T23:55:46.000Z | adversarial-transfer-nlp/CW_attack.py | AI-secure/Does-Adversairal-Transferability-Indicate-Knowledge-Transferability | a2fb10f56618c6d6dd1638967d59c4a83ffa1c05 | [
"CC0-1.0"
] | null | null | null | import sys
import torch
import numpy as np
from torch import optim
from util import args
class CarliniL2:
def __init__(self, targeted=True, search_steps=None, max_steps=None, cuda=True, debug=False, num_classes=14):
self.debug = debug
self.targeted = targeted
self.num_classes = num_classes
self.confidence = args.confidence # FIXME need to find a good value for this, 0 value used in paper not doing much...
self.initial_const = args.const # bumped up from default of .01 in reference code
self.binary_search_steps = search_steps or 1
self.repeat = self.binary_search_steps >= 10
self.max_steps = max_steps or args.max_steps
self.abort_early = True
self.cuda = cuda
self.mask = None
self.batch_info = None
self.wv = None
self.seq = None
self.seq_len = None
self.init_rand = False # an experiment, does a random starting point help?
def _compare(self, output, target):
if not isinstance(output, (float, int, np.int64)):
output = np.copy(output)
# if self.targeted:
# output[target] -= self.confidence
# else:
# output[target] += self.confidence
output = np.argmax(output)
if self.targeted:
return output == target
else:
return output != target
def _compare_untargeted(self, output, target):
if not isinstance(output, (float, int, np.int64)):
output = np.copy(output)
# if self.targeted:
# output[target] -= self.confidence
# else:
# output[target] += self.confidence
output = np.argmax(output)
if self.targeted:
return output == target + 1 or output == target - 1
else:
return output != target
def _loss(self, output, target, dist, scale_const):
# compute the probability of the label class versus the maximum other
real = (target * output).sum(1)
other = ((1. - target) * output - target * 10000.).max(1)[0]
if self.targeted:
# if targeted, optimize for making the other class most likely
loss1 = torch.clamp(other - real + self.confidence, min=0.) # equiv to max(..., 0.)
else:
# if non-targeted, optimize for making this class least likely.
loss1 = torch.clamp(real - other + self.confidence, min=0.) # equiv to max(..., 0.)
loss1 = torch.sum(scale_const * loss1)
loss2 = dist.sum()
if args.debug_cw:
print("loss 1:", loss1.item(), " loss 2:", loss2.item())
loss = loss1 + loss2
return loss
def _optimize(self, optimizer, model, input_var, modifier_var, target_var, scale_const_var, input_token=None):
# apply modifier and clamp resulting image to keep bounded from clip_min to clip_max
batch_adv_sent = []
if self.mask is None:
# not word-level attack
input_adv = modifier_var + input_var
output = model(input_adv)
input_adv = model.get_embedding()
input_var = input_token
seqback = model.get_seqback()
batch_adv_sent = seqback.adv_sent.copy()
seqback.adv_sent = []
# input_adv = self.itereated_var = modifier_var + self.itereated_var
else:
# word level attack
input_adv = modifier_var * self.mask + self.itereated_var
# input_adv = modifier_var * self.mask + input_var
for i in range(input_adv.size(0)):
# for batch size
new_word_list = []
add_start = self.batch_info['add_start'][i]
add_end = self.batch_info['add_end'][i]
if add_end < 0:
add_end = len(input_adv[i]) - 1
for j in range(add_start, add_end):
new_placeholder = input_adv[i, j].data
temp_place = new_placeholder.expand_as(self.wv)
new_dist = torch.norm(temp_place - self.wv.data, 2, -1)
_, new_word = torch.min(new_dist, 0)
new_word_list.append(new_word.item())
# input_adv.data[j, i] = self.wv[new_word.item()].data
input_adv.data[i, j] = self.itereated_var.data[i, j] = self.wv[new_word.item()].data
del temp_place
batch_adv_sent.append(new_word_list)
output = model(self.seq, self.batch_info['segment_ids'], self.batch_info['input_mask'], inputs_embeds=input_adv)
if args.debug_cw:
print("output:", batch_adv_sent)
print("input_adv:", input_adv)
print("output:", output)
adv_seq = torch.tensor(self.seq)
for bi, (add_start, add_end) in enumerate(zip(self.batch_info['add_start'], self.batch_info['add_end'])):
adv_seq.data[bi, add_start:add_end] = torch.LongTensor(batch_adv_sent)
print("out:", adv_seq)
print("out embedding:", model.bert.embeddings.word_embeddings(adv_seq))
out = model(adv_seq, self.seq_len)['pred']
print("out:", out)
def reduce_sum(x, keepdim=True):
# silly PyTorch, when will you get proper reducing sums/means?
for a in reversed(range(1, x.dim())):
x = x.sum(a, keepdim=keepdim)
return x
def l1_dist(x, y, keepdim=True):
d = torch.abs(x - y)
return reduce_sum(d, keepdim=keepdim)
def l2_dist(x, y, keepdim=True):
d = (x - y) ** 2
return reduce_sum(d, keepdim=keepdim)
# distance to the original input data
if args.l1:
dist = l1_dist(input_adv, input_var, keepdim=False)
else:
dist = l2_dist(input_adv, input_var, keepdim=False)
loss = self._loss(output, target_var, dist, scale_const_var)
if args.debug_cw:
print(loss)
optimizer.zero_grad()
if input_token is None:
loss.backward()
else:
loss.backward(retain_graph=True)
torch.nn.utils.clip_grad_norm_([modifier_var], args.clip)
# print(modifier_var)
optimizer.step()
# print(modifier_var)
# modifier_var.data -= 2 * modifier_var.grad.data
# modifier_var.grad.data.zero_()
loss_np = loss.item()
dist_np = dist.data.cpu().numpy()
output_np = output.data.cpu().numpy()
input_adv_np = input_adv.data.cpu().numpy()
return loss_np, dist_np, output_np, input_adv_np, batch_adv_sent
def run(self, model, input, target, batch_idx=0, batch_size=None, input_token=None):
if batch_size is None:
batch_size = input.size(0) # ([length, batch_size, nhim])
# set the lower and upper bounds accordingly
lower_bound = np.zeros(batch_size)
scale_const = np.ones(batch_size) * self.initial_const
upper_bound = np.ones(batch_size) * 1e10
# python/numpy placeholders for the overall best l2, label score, and adversarial image
o_best_l2 = [1e10] * batch_size
o_best_score = [-1] * batch_size
o_best_logits = {}
if input_token is None:
best_attack = input.cpu().detach().numpy()
o_best_attack = input.cpu().detach().numpy()
else:
best_attack = input_token.cpu().detach().numpy()
o_best_attack = input_token.cpu().detach().numpy()
self.o_best_sent = {}
self.best_sent = {}
# setup input (image) variable, clamp/scale as necessary
input_var = torch.tensor(input, requires_grad=False)
self.itereated_var = torch.tensor(input_var)
# setup the target variable, we need it to be in one-hot form for the loss function
target_onehot = torch.zeros(target.size() + (self.num_classes,))
# print(target_onehot.size())
if self.cuda:
target_onehot = target_onehot.cuda()
target_onehot.scatter_(1, target.unsqueeze(1), 1.)
target_var = torch.tensor(target_onehot, requires_grad=False)
# setup the modifier variable, this is the variable we are optimizing over
modifier = torch.zeros(input_var.size()).float().cuda()
if self.cuda:
modifier = modifier.cuda()
modifier_var = torch.tensor(modifier, requires_grad=True)
optimizer = optim.Adam([modifier_var], lr=args.lr)
for search_step in range(self.binary_search_steps):
if args.debug_cw:
print('Batch: {0:>3}, search step: {1}'.format(batch_idx, search_step))
print('Const:')
for i, x in enumerate(scale_const):
print(i, x)
best_l2 = [1e10] * batch_size
best_score = [-1] * batch_size
best_logits = {}
# The last iteration (if we run many steps) repeat the search once.
if self.repeat and search_step == self.binary_search_steps - 1:
scale_const = upper_bound
scale_const_tensor = torch.from_numpy(scale_const).float()
if self.cuda:
scale_const_tensor = scale_const_tensor.cuda()
scale_const_var = torch.tensor(scale_const_tensor, requires_grad=False)
for step in range(self.max_steps):
# perform the attack
if self.mask is None:
if args.decreasing_temp:
cur_temp = args.temp - (args.temp - 0.1) / (self.max_steps - 1) * step
model.set_temp(cur_temp)
if args.debug_cw:
print("temp:", cur_temp)
else:
model.set_temp(args.temp)
loss, dist, output, adv_img, adv_sents = self._optimize(
optimizer,
model,
input_var,
modifier_var,
target_var,
scale_const_var,
input_token)
for i in range(batch_size):
target_label = target[i]
output_logits = output[i]
output_label = np.argmax(output_logits)
di = dist[i]
if self.debug:
if step % 100 == 0:
print('{0:>2} dist: {1:.5f}, output: {2:>3}, {3:5.3}, target {4:>3}'.format(
i, di, output_label, output_logits[output_label], target_label))
if di < best_l2[i] and self._compare_untargeted(output_logits, target_label):
# if self._compare(output_logits, target_label):
if self.debug:
print('{0:>2} best step, prev dist: {1:.5f}, new dist: {2:.5f}'.format(
i, best_l2[i], di))
best_l2[i] = di
best_score[i] = output_label
best_logits[i] = output_logits
best_attack[i] = adv_img[i]
self.best_sent[i] = adv_sents[i]
if di < o_best_l2[i] and self._compare(output_logits, target_label):
# if self._compare(output_logits, target_label):
if self.debug:
print('{0:>2} best total, prev dist: {1:.5f}, new dist: {2:.5f}'.format(
i, o_best_l2[i], di))
o_best_l2[i] = di
o_best_score[i] = output_label
o_best_logits[i] = output_logits
o_best_attack[i] = adv_img[i]
self.o_best_sent[i] = adv_sents[i]
sys.stdout.flush()
# end inner step loop
# adjust the constants
batch_failure = 0
batch_success = 0
for i in range(batch_size):
if self._compare(o_best_score[i], target[i]) and o_best_score[i] != -1:
batch_success += 1
if args.debug_cw:
print(self.o_best_sent[i])
print(o_best_score[i])
print(o_best_logits[i])
elif self._compare_untargeted(best_score[i], target[i]) and best_score[i] != -1:
o_best_l2[i] = best_l2[i]
o_best_score[i] = best_score[i]
o_best_attack[i] = best_attack[i]
self.o_best_sent[i] = self.best_sent[i]
if args.debug_cw:
print(self.o_best_sent[i])
print(o_best_score[i])
print(o_best_logits[i])
batch_success += 1
else:
batch_failure += 1
print('Num failures: {0:2d}, num successes: {1:2d}\n'.format(batch_failure, batch_success))
sys.stdout.flush()
# end outer search loop
return o_best_attack
| 44.962963 | 126 | 0.543582 | 3.03125 |
85963afcca8eca6c1bb7832716a10d2260d05acc | 1,474 | js | JavaScript | client/src/components/utils/InfiniteScroll.js | palaumarc/flickr_gallery | e6a194955016fa696610176c897aca1f88ab7acd | [
"MIT"
] | null | null | null | client/src/components/utils/InfiniteScroll.js | palaumarc/flickr_gallery | e6a194955016fa696610176c897aca1f88ab7acd | [
"MIT"
] | null | null | null | client/src/components/utils/InfiniteScroll.js | palaumarc/flickr_gallery | e6a194955016fa696610176c897aca1f88ab7acd | [
"MIT"
] | null | null | null | import React, { Component, Fragment } from 'react';
import PropTypes from 'prop-types';
import Spin from './Spin';
class InfiniteScroll extends Component {
static propTypes = {
loadMore: PropTypes.func.isRequired,
hasMore: PropTypes.bool
}
static defaultProps = {
hasMore: true
}
state = {
isLoading: false
}
onScroll = () => {
const { isLoading } = this.state;
if (isLoading) return;
// Checks that the page has scrolled to the bottom
if (window.innerHeight + document.documentElement.scrollTop === document.documentElement.offsetHeight) {
this.execLoadMore();
}
};
execLoadMore = async () => {
this.setState(prevState => ({...prevState, isLoading: true}));
await this.props.loadMore()
this.setState(prevState => ({...prevState, isLoading: false}));
if (!this.props.hasMore) {
document.removeEventListener('scroll', this.onScroll);
}
}
async componentDidMount() {
document.addEventListener('scroll', this.onScroll);
// Keep loading until available height is filled or there are no more elements
while (document.documentElement.offsetHeight < window.innerHeight && this.props.hasMore) {
await this.execLoadMore();
}
}
render() {
return (
<Fragment>
{this.props.children}
{this.state.isLoading ? <Spin /> : null}
</Fragment>
)
}
}
export default InfiniteScroll; | 24.163934 | 110 | 0.630258 | 3.265625 |
a8dbe5bd998d90d98e3c8c88c85874a068d84095 | 5,205 | rs | Rust | rs/replicated_state/src/page_map/tests.rs | audieleon/ic | 35dd8f93dec82662ed4df35664a9c0be6dbf203a | [
"Apache-2.0"
] | 1 | 2021-07-20T21:44:44.000Z | 2021-07-20T21:44:44.000Z | rs/replicated_state/src/page_map/tests.rs | AmoretAaron/ic | d9202bc7f6d16b2777d4e092ee1b7ad5899aae2b | [
"Apache-2.0"
] | null | null | null | rs/replicated_state/src/page_map/tests.rs | AmoretAaron/ic | d9202bc7f6d16b2777d4e092ee1b7ad5899aae2b | [
"Apache-2.0"
] | 1 | 2022-01-13T13:37:41.000Z | 2022-01-13T13:37:41.000Z | use super::{allocate_pages, checkpoint::Checkpoint, Buffer, PageDelta, PageIndex, PageMap};
use ic_sys::PAGE_SIZE;
use std::fs::OpenOptions;
#[test]
fn can_debug_display_a_page_map() {
let page_map = PageMap::new();
assert_eq!(format!("{:?}", page_map), "{}");
}
#[test]
fn can_create_an_empty_checkpoint() {
let checkpoint = Checkpoint::empty();
let empty_page = vec![0; *PAGE_SIZE];
let first_page = checkpoint.get_page(PageIndex::from(1));
assert_eq!(&empty_page[..], first_page);
}
#[test]
fn empty_page_map_returns_zeroed_pages() {
let page_map = PageMap::new();
let page = page_map.get_page(PageIndex::from(1));
assert_eq!(page.len(), *PAGE_SIZE);
assert!(page.iter().all(|b| *b == 0));
}
#[test]
fn can_update_a_page_map() {
let mut page_map = PageMap::new();
let ones = vec![1u8; *PAGE_SIZE];
let twos = vec![2u8; *PAGE_SIZE];
let delta = PageDelta::from(
&[
(PageIndex::from(1), &ones[..]),
(PageIndex::from(2), &twos[..]),
][..],
);
page_map.update(delta);
for (num, contents) in &[(1, 1), (2, 2), (3, 0)] {
assert!(page_map
.get_page(PageIndex::from(*num))
.iter()
.all(|b| *b == *contents));
}
}
#[test]
fn can_allocate_pages() {
let page = vec![5; *PAGE_SIZE];
let tracked_pages = allocate_pages(&[&page[..]]);
assert_eq!(tracked_pages.len(), 1);
assert_eq!(tracked_pages[0].contents(), page.as_slice());
}
#[test]
fn can_make_page_deltas() {
let page = vec![5u8; *PAGE_SIZE];
let page_delta = PageDelta::from(&[(PageIndex::from(5), &page[..])][..]);
assert_eq!(page_delta.len(), 1);
assert_eq!(page_delta.get_page(PageIndex::from(5)).unwrap(), &page[..])
}
#[test]
fn left_delta_wins_in_extend() {
let page_1 = vec![1u8; *PAGE_SIZE];
let page_2 = vec![2u8; *PAGE_SIZE];
let delta_1 = PageDelta::from(&[(PageIndex::from(1), &page_1[..])][..]);
let delta_2 = PageDelta::from(&[(PageIndex::from(1), &page_2[..])][..]);
let union_12 = delta_1.extend(delta_2);
assert_eq!(union_12.len(), 1);
assert_eq!(union_12.get_page(PageIndex::from(1)).unwrap(), &page_1[..]);
}
#[test]
fn persisted_map_is_equivalent_to_the_original() {
let tmp = tempfile::Builder::new()
.prefix("checkpoints")
.tempdir()
.unwrap();
let heap_file = tmp.path().join("heap");
let page_1 = vec![1u8; *PAGE_SIZE];
let page_3 = vec![3u8; *PAGE_SIZE];
let delta = PageDelta::from(
&[
(PageIndex::from(1), &page_1[..]),
(PageIndex::from(3), &page_3[..]),
][..],
);
let mut original_map = PageMap::default();
original_map.update(delta);
original_map.persist_delta(&heap_file).unwrap();
let persisted_map = PageMap::open(&heap_file).unwrap();
assert_eq!(persisted_map, original_map);
}
#[test]
fn can_persist_and_load_an_empty_page_map() {
let tmp = tempfile::Builder::new()
.prefix("checkpoints")
.tempdir()
.unwrap();
let heap_file = tmp.path().join("heap");
let original_map = PageMap::default();
original_map.persist_delta(&heap_file).unwrap();
let persisted_map = PageMap::open(&heap_file).expect("opening an empty page map must succeed");
assert_eq!(original_map, persisted_map);
}
#[test]
fn returns_an_error_if_file_size_is_not_a_multiple_of_page_size() {
use std::io::Write;
let tmp = tempfile::Builder::new()
.prefix("checkpoints")
.tempdir()
.unwrap();
let heap_file = tmp.path().join("heap");
OpenOptions::new()
.write(true)
.create(true)
.open(&heap_file)
.unwrap()
.write_all(&vec![1; *PAGE_SIZE / 2])
.unwrap();
match PageMap::open(&heap_file) {
Err(err) => assert!(
err.is_invalid_heap_file(),
"Expected invalid heap file error, got {:?}",
err
),
Ok(_) => panic!("Expected a invalid heap file error, got Ok(_)"),
}
}
#[test]
fn can_use_buffer_to_modify_page_map() {
let page_1 = vec![1u8; *PAGE_SIZE];
let page_3 = vec![3u8; *PAGE_SIZE];
let delta = PageDelta::from(
&[
(PageIndex::from(1), &page_1[..]),
(PageIndex::from(3), &page_3[..]),
][..],
);
let mut page_map = PageMap::default();
page_map.update(delta);
let n = 4 * *PAGE_SIZE;
let mut vec_buf = vec![0u8; n];
vec_buf[*PAGE_SIZE..2 * *PAGE_SIZE].copy_from_slice(&page_1);
vec_buf[3 * *PAGE_SIZE..4 * *PAGE_SIZE].copy_from_slice(&page_3);
let mut buf = Buffer::new(page_map);
let mut read_buf = vec![0u8; n];
buf.read(&mut read_buf[..], 0);
assert_eq!(read_buf, vec_buf);
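    // write chunks of doubling length at every offset and check the Buffer stays in sync with the plain Vec mirror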
for offset in 0..n {
let mut len = 1;
while (offset + len) < n {
let b = ((offset + len) % 15) as u8;
for dst in vec_buf.iter_mut().skip(offset).take(len) {
*dst = b;
}
buf.write(&vec_buf[offset..offset + len], offset);
buf.read(&mut read_buf[..], 0);
assert_eq!(read_buf, vec_buf);
len *= 2;
}
}
}
| 27.68617 | 99 | 0.578866 | 3.09375 |
71e68956d75edcf9698c155dfeb03a06cc6ecaee | 1,472 | kt | Kotlin | modules/drawing/src/main/kotlin/silentorb/mythic/drawing/drawText.kt | silentorb/mythic-kotlin | 74462fcba9e7805dddec1bfcb3431665df7d0dee | [
"MIT"
] | 1 | 2020-06-24T02:01:23.000Z | 2020-06-24T02:01:23.000Z | modules/drawing/src/main/kotlin/silentorb/mythic/drawing/drawText.kt | silentorb/mythic-kotlin | 74462fcba9e7805dddec1bfcb3431665df7d0dee | [
"MIT"
] | 5 | 2019-12-24T17:14:43.000Z | 2021-01-26T22:33:47.000Z | modules/drawing/src/main/kotlin/silentorb/mythic/drawing/drawText.kt | silentorb/mythic-kotlin | 74462fcba9e7805dddec1bfcb3431665df7d0dee | [
"MIT"
] | null | null | null | package silentorb.mythic.drawing
import silentorb.mythic.glowing.DrawMethod
import silentorb.mythic.glowing.VertexSchema
import silentorb.mythic.glowing.globalState
import silentorb.mythic.spatial.Matrix
import silentorb.mythic.spatial.Vector2
import silentorb.mythic.typography.TextConfiguration
import silentorb.mythic.typography.TextPackage
import silentorb.mythic.typography.prepareText
import silentorb.mythic.spatial.Vector2i
import org.lwjgl.opengl.GL11.*
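// Aspect-ratio compensation: the longer window axis is scaled down so drawing units stay square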
fun getUnitScaling(dimensions: Vector2i) =
if (dimensions.x < dimensions.y)
Vector2(1f, dimensions.x.toFloat() / dimensions.y)
else
Vector2(dimensions.y.toFloat() / dimensions.x, 1f)
fun prepareTextMatrix(pixelsToScalar: Matrix, position: Vector2) =
Matrix.identity
.mul(pixelsToScalar)
.translate(position.x, position.y, 0f)
fun renderText(config: TextConfiguration, effect: ColoredImageShader, textPackage: TextPackage, transform: Matrix) {
effect.activate(transform, config.style.color, config.style.font.texture)
globalState.blendEnabled = true
globalState.blendFunction = Pair(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
textPackage.mesh.draw(DrawMethod.triangleFan)
}
fun drawTextRaw(config: TextConfiguration, effect: ColoredImageShader, vertexSchema: VertexSchema, transform: Matrix) {
val textPackage = prepareText(config, vertexSchema)
if (textPackage != null) {
renderText(config, effect, textPackage, transform)
textPackage.mesh.dispose()
}
}
| 36.8 | 119 | 0.796196 | 3.109375 |
f6743e007e18ef6144696c5a28916071fb960531 | 1,915 | kt | Kotlin | src/main/kotlin/ui/util/swing/Tables.kt | marshallbrain/pulsar-java | fed43a0164421b27056045446f07982e0313452d | [
"MIT"
] | null | null | null | src/main/kotlin/ui/util/swing/Tables.kt | marshallbrain/pulsar-java | fed43a0164421b27056045446f07982e0313452d | [
"MIT"
] | null | null | null | src/main/kotlin/ui/util/swing/Tables.kt | marshallbrain/pulsar-java | fed43a0164421b27056045446f07982e0313452d | [
"MIT"
] | null | null | null | package ui.util.swing
import java.awt.Dimension
import javax.swing.BorderFactory
import javax.swing.JScrollPane
import javax.swing.JTable
import javax.swing.ListSelectionModel
import javax.swing.border.Border
import javax.swing.table.DefaultTableModel
fun createScrollTable(
table: JTable,
maxVisibleRows: Int = table.rowCount,
border: Border = BorderFactory.createEmptyBorder()
) : JScrollPane {
val scrollPane = object : ContinuesScrollPane(table) {
override fun getPreferredSize(): Dimension {
val insets = border.getBorderInsets(this)
return Dimension(0, (table.rowHeight)*maxVisibleRows) +
columnHeader.preferredSize +
Dimension(insets.left + insets.right, insets.top + insets.bottom)
}
override fun getMinimumSize(): Dimension = preferredSize
override fun getMaximumSize(): Dimension = preferredSize
init {
addMouseWheelListener(ContinuesMouseWheelListener())
}
}
scrollPane.border = border
return scrollPane
}
fun createTable(
vararg columnNames: String,
data: MutableList<*> = emptyList<String>().toMutableList()
) : JTable {
val formattedData = getFormattedData(data, *columnNames)
val table = object : JTable(DefaultTableModel(formattedData, columnNames)) {
override fun isCellEditable(row: Int, column: Int): Boolean = false
}
table.setSelectionMode(ListSelectionModel.SINGLE_SELECTION)
table.cellSelectionEnabled = false
table.rowSelectionAllowed = true
table.fillsViewportHeight = true
return table
}
private fun getFormattedData(
data: MutableList<*>,
vararg columnNames: String
) : Array<Array<*>> {
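    // reshape the flat data list into rowNum x colNum rows (row-major order)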
val colNum = columnNames.size
val rowNum = data.size / colNum
return List(rowNum) { row ->
List(colNum) { col ->
data[row * colNum + col]
}.toTypedArray()
}.toTypedArray()
}
private operator fun Dimension.plus(i : Dimension) : Dimension {
return Dimension(width + i.width, height + i.height)
}
| 23.641975 | 77 | 0.746214 | 3.140625 |
124f49977e548fd065352dda1813f453f87675ba | 5,700 | h | C | src/backgroundprocess/app.h | Skycoder42/QBackgroundProcess | 9681aa736729ce1b7985e8664e0f05729235e0c4 | [
"BSD-3-Clause"
] | 10 | 2017-08-17T16:34:03.000Z | 2021-02-16T17:42:30.000Z | src/backgroundprocess/app.h | Skycoder42/QBackgroundProcess | 9681aa736729ce1b7985e8664e0f05729235e0c4 | [
"BSD-3-Clause"
] | null | null | null | src/backgroundprocess/app.h | Skycoder42/QBackgroundProcess | 9681aa736729ce1b7985e8664e0f05729235e0c4 | [
"BSD-3-Clause"
] | 4 | 2018-08-27T06:15:11.000Z | 2021-03-08T10:10:34.000Z | #ifndef QTBACKGROUNDPROCESS_APP_H
#define QTBACKGROUNDPROCESS_APP_H
#include "QtBackgroundProcess/qtbackgroundprocess_global.h"
#include <QtCore/qcommandlineparser.h>
#include <QtCore/qcoreapplication.h>
#include <QtCore/qexception.h>
#include <functional>
//! The Namespace containing all classes of the QtBackgroundProcess module
namespace QtBackgroundProcess {
class Terminal;
//! Will be thrown, if you perform an operation, that is not allowed in running state
class Q_BACKGROUNDPROCESS_EXPORT NotAllowedInRunningStateException : public QException
{
public:
NotAllowedInRunningStateException();
//! @inherit{std::exception::what}
const char *what() const noexcept override;
//! @inherit{QException::raise}
void raise() const override;
//! @inherit{QException::clone}
QException *clone() const override;
};
class AppPrivate;
//! The background process application. The main class of QtBackgroundProcess
class Q_BACKGROUNDPROCESS_EXPORT App : public QCoreApplication
{
Q_OBJECT
friend class AppPrivate;
//! The current id of the singleton instance of the master process
Q_PROPERTY(QString instanceID READ instanceID WRITE setInstanceID RESET createDefaultInstanceID)
//! Specify whether the app should be a systemwide or userwide single instance
Q_PROPERTY(bool globalInstance READ globalInstance WRITE setGlobalInstance)
//! Specifies, whether the master should forward debug output to all terminals
Q_PROPERTY(bool forwardMasterLog READ forwardMasterLog WRITE setForwardMasterLog)
//! If true, the master process will always be started, not only with "start"
Q_PROPERTY(bool autoStartMaster READ autoStartMaster WRITE setAutoStartMaster)
//! If true, "start" commands will be ignored, if the master is already running
Q_PROPERTY(bool ignoreMultiStarts READ ignoreMultiStarts WRITE setIgnoreMultiStarts)
//! If true, the master process will automatically delete terminals that have been disconnected
Q_PROPERTY(bool autoDeleteTerminals READ autoDeleteTerminals WRITE setAutoDeleteTerminals)
//! If true, the master process will automatically close terminals after it received the parameters
Q_PROPERTY(bool autoKillTerminals READ autoKillTerminals WRITE setAutoKillTerminals)
//! Holds a list of all currently connected terminals
Q_PROPERTY(QList<Terminal*> connectedTerminals READ connectedTerminals NOTIFY connectedTerminalsChanged)
public:
//! Creates a new app with it's arguments
App(int &argc, char **argv, int flags = ApplicationFlags);
//! Destructor
~App();
//! @readAcFn{App::instanceID}
QString instanceID() const;
//! @readAcFn{App::globalInstance}
bool globalInstance() const;
//! @readAcFn{App::forwardMasterLog}
bool forwardMasterLog() const;
//! @readAcFn{App::autoStartMaster}
bool autoStartMaster() const;
//! @readAcFn{App::ignoreMultiStarts}
bool ignoreMultiStarts() const;
//! @readAcFn{App::autoDeleteTerminals}
bool autoDeleteTerminals() const;
//! @readAcFn{App::autoKillTerminals}
bool autoKillTerminals() const;
//! Sets the function to be called for the creation of the parser (Instead of overriding)
void setParserSetupFunction(const std::function<void(QCommandLineParser &)> &function);
//! Sets the function to be called to startup the application (Instead of overriding)
void setStartupFunction(const std::function<int (const QCommandLineParser &)> &function);
//! Sets the function to be called to handle shutdown requests (Instead of overriding)
void setShutdownRequestFunction(const std::function<bool(const QCommandLineParser &, int&)> &function);
//! Sets the function to be called to handle shutdown requests (Instead of overriding)
void setShutdownRequestFunction(const std::function<bool(Terminal*, int&)> &function);
//! Executes the application event loop
int exec();
//! @readAcFn{App::connectedTerminals}
QList<Terminal*> connectedTerminals() const;
public Q_SLOTS:
//! @resetAcFn{App::instanceID}
void createDefaultInstanceID(bool overwrite = true);
//! @writeAcFn{App::instanceID}
void setInstanceID(QString instanceID, bool useAsSeed = true);
//! @writeAcFn{App::globalInstance}
void setGlobalInstance(bool globalInstance);
//! @writeAcFn{App::forwardMasterLog}
void setForwardMasterLog(bool forwardMasterLog);
//! @writeAcFn{App::autoStartMaster}
void setAutoStartMaster(bool autoStartMaster);
//! @writeAcFn{App::ignoreMultiStarts}
void setIgnoreMultiStarts(bool ignoreMultiStarts);
//! @writeAcFn{App::autoDeleteTerminals}
void setAutoDeleteTerminals(bool autoDeleteTerminals, bool changeCurrent = false);
//! @writeAcFn{App::autoKillTerminals}
void setAutoKillTerminals(bool autoKillTerminals, bool killCurrent = false);
Q_SIGNALS:
//! Will be emitted when a new terminal has connected to the master
void newTerminalConnected(QtBackgroundProcess::Terminal *terminal, QPrivateSignal);
//! Will be emitted when a new terminal sent arguments to the master
void commandReceived(QSharedPointer<QCommandLineParser> parser, bool isStarter, QPrivateSignal);
//! @notifyAcFn{App::connectedTerminals}
void connectedTerminalsChanged(QList<Terminal*> connectedTerminals, QPrivateSignal);
protected:
//! Sets up the parser to parse commands and arguments
virtual void setupParser(QCommandLineParser &parser, bool useShortOptions = true);
//! Is called as initialization function of the master process
virtual int startupApp(const QCommandLineParser &parser);
//! Gets called when a terminal requests a shutdown of the master
virtual bool requestAppShutdown(Terminal *terminal, int &exitCode);
private:
AppPrivate* d;
};
}
#undef qApp
#define qApp static_cast<QtBackgroundProcess::App*>(QCoreApplication::instance())
#endif // QTBACKGROUNDPROCESS_APP_H
| 41.605839 | 105 | 0.792982 | 3.140625 |
f037e3032ce0ce519e6e32edcbecb11a4130e35e | 1,595 | js | JavaScript | www/js/search_worker.js | xqqy/myfuzhong | 25e7f6e79b65a4e2c550f4b8d43a35f5030fcf1b | [
"Apache-2.0"
] | 1 | 2018-01-27T15:48:01.000Z | 2018-01-27T15:48:01.000Z | www/js/search_worker.js | xqqy/myfuzhong | 25e7f6e79b65a4e2c550f4b8d43a35f5030fcf1b | [
"Apache-2.0"
] | null | null | null | www/js/search_worker.js | xqqy/myfuzhong | 25e7f6e79b65a4e2c550f4b8d43a35f5030fcf1b | [
"Apache-2.0"
] | null | null | null | var list,all;
function dialogAlert(message, title, buttonname, callback) { // notification service
    title = title || "Error";
    buttonname = buttonname || "OK";
callback = callback || function () {
return;
}
if(navigator.notification){
navigator.notification.alert(message, callback, title, buttonname);
}else{
alert(message);
}
}
function get(e) {// fetch the list from the server
var data=new FormData;
data.append("UID",e.data[2]);
data.append("TOKEN",e.data[3]);
var xhr = new XMLHttpRequest;
xhr.open("post", e.data[1], true);
xhr.onreadystatechange = function () {
if (xhr.readyState == 4) {
if (xhr.status == 200) {
if (xhr.responseText.split("/meow/")[0] == "done") {
list = xhr.responseText.split("/meow/");
all = list.length
onmsg(e)
} else {
dialogAlert(xhr.responseText);
close();
}
} else {
                dialogAlert("Network connection failed: " + xhr.status);
close();
}
}
}
xhr.send(data);
}
onmessage = onmsg;
function onmsg(e) {
if(!list){
get(e);
return;
}
var now = 1,
ret = "";
while (now < all) {
if (list[now+2].toUpperCase().indexOf(document.getElementById("search").value.toUpperCase()) > -1) {
ret+='<a href="#" class="collection-item" onclick="ati('+"'"+list[now+1]+"'"+','+"'"+list[now]+"'"+')">'+list[now+2]+'</a>';
}
now += 3;
}
postMessage(ret);
}; | 29 | 136 | 0.485893 | 3.140625 |
0bc6ef3ed241becc1afee395cbae4a3b843d3a02 | 1,342 | js | JavaScript | app/components/layout/Header.js | yarikgenza/trello | a95f9418757aedd696ee24ee9c4be4fd4ab01a67 | [
"MIT"
] | 1 | 2017-01-07T23:57:15.000Z | 2017-01-07T23:57:15.000Z | app/components/layout/Header.js | yarikgenza/trello | a95f9418757aedd696ee24ee9c4be4fd4ab01a67 | [
"MIT"
] | null | null | null | app/components/layout/Header.js | yarikgenza/trello | a95f9418757aedd696ee24ee9c4be4fd4ab01a67 | [
"MIT"
] | null | null | null | import React, {Component} from 'react';
import {Button} from 'react-bootstrap';
export default class Header extends Component {
constructor() {
super();
this.state = {
text: ''
}
}
componentDidMount() {
const token = localStorage.getItem('token');
const {text} = this.state;
if (token) {
fetch('/api/user', {
method: 'get',
headers: {
'Content-type': 'application/json',
authorization: token
}
})
.then(res => res.json())
.then((res) => {
this.setState({
text: res.login
})
})
.catch((e) => { console.log(e) })
}
}
logOut() {
localStorage.removeItem('token');
window.location.href = '/'
}
render() {
const token = localStorage.getItem('token');
if (token) {
return (
<div className="header">
<h1 className="header_positioned">Trello</h1>
<div className="logOut">
<div className="userLogin">
<p>{this.state.text}</p>
</div>
<Button onClick={() => this.logOut()} bsStyle="info">Log out</Button>
</div>
</div>
)
} else {
return (
<div className="header">
<h1>Trello</h1>
</div>
)
}
}
}
| 20.333333 | 81 | 0.481371 | 3.046875 |
11c17cffca45808b3ff136341eb793f249307f98 | 2,384 | rs | Rust | src/pointer.rs | sim82/game1 | e4cd03c91421dea3234eaf7b4f9fabbae30312be | [
"MIT"
] | null | null | null | src/pointer.rs | sim82/game1 | e4cd03c91421dea3234eaf7b4f9fabbae30312be | [
"MIT"
] | null | null | null | src/pointer.rs | sim82/game1 | e4cd03c91421dea3234eaf7b4f9fabbae30312be | [
"MIT"
] | null | null | null | use bevy::{
input::mouse::{MouseButtonInput, MouseMotion},
prelude::*,
};
pub struct MouseGrabState {
pub shall_grab: bool,
known_state: bool,
}
fn mouse_grab_system(
mut grab_state: ResMut<MouseGrabState>,
mut windows: ResMut<Windows>,
keyboard_input: Res<Input<KeyCode>>,
) {
let update = if keyboard_input.just_pressed(KeyCode::Grave) {
grab_state.shall_grab = !grab_state.shall_grab;
true
} else {
false
};
if update || !grab_state.known_state {
grab_state.known_state = true;
let window = windows.get_primary_mut().unwrap();
if window.cursor_locked() != grab_state.shall_grab {
window.set_cursor_lock_mode(grab_state.shall_grab);
window.set_cursor_visibility(!grab_state.shall_grab);
}
}
}
#[derive(Default)]
pub struct PrimaryPointerPos {
pub pos: Vec3,
}
#[derive(Debug)]
pub struct ClickEvent {
pub pos: Vec3,
}
#[derive(Component)]
pub struct MousePointerFlag;
pub fn mouse_input_system(
mut query: Query<&mut Transform, With<MousePointerFlag>>,
mut mouse_button_input_events: EventReader<MouseButtonInput>,
mut mouse_motion_events: EventReader<MouseMotion>,
mut primary_pointer: ResMut<PrimaryPointerPos>,
grab_state: Res<MouseGrabState>,
mut click_events: EventWriter<ClickEvent>,
) {
if !grab_state.shall_grab {
return;
}
for mut transform in query.iter_mut() {
for event in mouse_motion_events.iter() {
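            // flip the y axis and apply a fixed sensitivity factor to the mouse delta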
let d = Vec3::new(event.delta.x, -event.delta.y, 0.0);
transform.translation += d * 0.5;
}
primary_pointer.pos = transform.translation;
}
for event in mouse_button_input_events.iter() {
if event.button == MouseButton::Left && event.state.is_pressed() {
// info!("pressed");
click_events.send(ClickEvent {
pos: primary_pointer.pos,
})
}
}
}
pub struct PointerPlugin;
impl Plugin for PointerPlugin {
fn build(&self, app: &mut App) {
app.add_system(mouse_input_system)
.add_system(mouse_grab_system)
.init_resource::<PrimaryPointerPos>()
.insert_resource(MouseGrabState {
shall_grab: true,
known_state: false,
})
.add_event::<ClickEvent>();
}
}
| 26.786517 | 74 | 0.630453 | 3.078125 |
18bf1a0139d71e8ef2b676cb4ba44ba527cc0964 | 1,872 | sql | SQL | Behavior_Analysis_Setup/StudentTimeAndPoints.sql | bbrub49/SQL-Code-Examples | 8c4be22f31e52b906d324174a93486a1ace0c4eb | [
"MIT"
] | null | null | null | Behavior_Analysis_Setup/StudentTimeAndPoints.sql | bbrub49/SQL-Code-Examples | 8c4be22f31e52b906d324174a93486a1ace0c4eb | [
"MIT"
] | null | null | null | Behavior_Analysis_Setup/StudentTimeAndPoints.sql | bbrub49/SQL-Code-Examples | 8c4be22f31e52b906d324174a93486a1ace0c4eb | [
"MIT"
] | null | null | null | /*
* Establishes the reason and minutes a student is absent
* if the absence is excused in order to remove those
* minutes from the students total time and total points
* possible to earn that day
*/
DECLARE @yeartype as varchar(15)
SET @yeartype = (SELECT [Type] FROM NSSEOPulse.dbo.ZM_TEST_BB_SchoolYear WHERE [Description] = 'Current')
IF @yeartype = 'Summer'
SELECT ArchYear, dp.Site, dp.StudentID, StartDate, EndDate, RngS, RngE, ServiceCode, ProgramDescription, EntryValue,
GroupNumber, StartWk, EndWk, DayDate, DtRng, TotalDays,
(CAST(MinPossible as float) - CAST(MinPresent as float)) as MinutesAbsent,
'ESY' as ReasonCode, 0.00 as ActualAbsence
FROM ZZ_TEST_BB_20DayDataPrep dp LEFT OUTER JOIN (
SELECT SIte, SchoolYear, StudentID, AtnDate, SubjSec, MinPresent, MinPossible, ATNSpecialCode, ATNSpecialComment
FROM ZZ_TEST_ATNSpecialAdditional
WHERE SIte = 'TR'
) as ma ON dp.StudentID = ma.StudentID AND dp.DayDate = CONVERT(Date,ma.AtnDate)
ELSE
SELECT ArchYear, dp.Site, dp.StudentID, StartDate, EndDate, RngS, RngE, ServiceCode, ProgramDescription, EntryValue,
GroupNumber, StartWk, EndWk, DayDate, DtRng, TotalDays,
CASE
WHEN TotalMinutesAbs IS NULL THEN 0
ELSE TotalMinutesAbs
END AS ActualAbsence,
CASE
WHEN (ReasonCode IN ('E','L','X','M')) OR (ReasonCode = 'G' AND ServiceCode IN ('1','2','3')) THEN TotalMinutesAbs
ELSE 0
END AS MinutesAbsent,
ReasonCode
FROM ZZ_TEST_BB_20DayDataPrep dp LEFT OUTER JOIN (
SELECT DISTINCT PrimarySite, ma.StudentID, ma.TotalMinutesAbs, Minutespossible, ma.AttendanceDate, ReasonCode
FROM ZZ_TEST_ATNStudentDetail ma INNER JOIN ZZ_TEST_ATNAttendanceMarks am ON ma.StudentID = am.StudentID AND ma.AttendanceDate = am.AbsentDate
WHERE PrimarySite = 'TR'
) as ma ON dp.StudentID = ma.StudentID AND CONVERT(Date,dp.DayDate) = CONVERT(Date,ma.AttendanceDate) | 42.545455 | 143 | 0.76015 | 3.203125 |
f07411bf6835efa66845aedc9d0915e9f4597ba2 | 1,138 | py | Python | UnitTests/FullAtomModel/CoordsTransform/test_forward.py | johahi/TorchProteinLibrary | b1fc9faa9b51c4550e5f754d075766ba38e0f8a0 | [
"MIT"
] | null | null | null | UnitTests/FullAtomModel/CoordsTransform/test_forward.py | johahi/TorchProteinLibrary | b1fc9faa9b51c4550e5f754d075766ba38e0f8a0 | [
"MIT"
] | null | null | null | UnitTests/FullAtomModel/CoordsTransform/test_forward.py | johahi/TorchProteinLibrary | b1fc9faa9b51c4550e5f754d075766ba38e0f8a0 | [
"MIT"
] | null | null | null | import sys
import os
import torch
import numpy as np
from TorchProteinLibrary.FullAtomModel.CoordsTransform import CoordsTranslate, getRandomTranslation, getBBox, CoordsRotate, getRandomRotation
from TorchProteinLibrary.FullAtomModel import Angles2Coords, Coords2TypedCoords
def test_translation(coords, num_atoms):
translate = CoordsTranslate()
a,b = getBBox(coords, num_atoms)
center = (a+b)*0.5
print (center)
centered_coords = translate(coords, -center, num_atoms)
a,b = getBBox(centered_coords, num_atoms)
center = (a+b)*0.5
print(center)
def test_rotation(coords, num_atoms):
batch_size = num_atoms.size(0)
R = getRandomRotation(batch_size)
rotate = CoordsRotate()
rotated = rotate(coords, R, num_atoms)
print(rotated)
if __name__=='__main__':
sequences = ['GGGGGG', 'GGAARRRRRRRRR']
angles = torch.zeros(2, 7,len(sequences[1]), dtype=torch.double)
angles[:,0,:] = -1.047
angles[:,1,:] = -0.698
angles[:,2:,:] = 110.4*np.pi/180.0
a2c = Angles2Coords()
protein, res_names, atom_names, num_atoms = a2c(angles, sequences)
test_translation(protein, num_atoms)
test_rotation(protein, num_atoms)
| 25.863636 | 141 | 0.748682 | 3.203125 |
a179af87613d1e41c4a92bf0b289fa58d5086d23 | 2,360 | go | Go | server/server.go | asommer70/evergreeen-new-releases | ebf1a02a2b199d68a138ad4b5e3b76ae63aa272a | [
"MIT"
] | null | null | null | server/server.go | asommer70/evergreeen-new-releases | ebf1a02a2b199d68a138ad4b5e3b76ae63aa272a | [
"MIT"
] | null | null | null | server/server.go | asommer70/evergreeen-new-releases | ebf1a02a2b199d68a138ad4b5e3b76ae63aa272a | [
"MIT"
] | null | null | null | package main
import (
"fmt"
"github.com/anaskhan96/soup"
"net/http"
//"io/ioutil"
"encoding/json"
"os"
"strconv"
"strings"
"time"
)
type TitleSearchResult struct {
Query string
Results []TitleResult
}
type TitleResult struct {
Name, Description, Url []string
}
func main() {
// titles := getSearchPage()
// fmt.Println("titles:", titles)
// for _, title := range titles {
// // TODO:as create Go Routines for getting information for each title.
// }
getMovieInfo()
}
func getSearchPage() []string {
base_url := "http://nccardinal.org"
library_number := 132
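	// Evergreen OPAC advanced search: video recordings at this library, newest first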
search_url := "/eg/opac/results?bool=and&qtype=keyword&contains=contains&query=&bool=and&qtype=title&contains=contains&query=&bool=and&qtype=author&contains=contains&query=&_adv=1&detail_record_view=0&fi%3Aitem_type=g&fi%3Avr_format=v&locg=" + strconv.Itoa(library_number) + "&pubdate=is&date1=&date2=&sort=pubdate.descending"
url := base_url + search_url
//fmt.Println("url:", url)
resp, err := soup.Get(url)
if err != nil {
os.Exit(1)
}
doc := soup.HTMLParse(resp)
links := doc.FindAll("a", "class", "record_title search_link")
//fmt.Println(links)
// TODO:as also get the library link for each movie.
	titles := make([]string, 0, len(links))
for _, link := range links {
// fmt.Println(link.Text(), "| Link :", link.Attrs()["href"])
//fmt.Println(strings.TrimSpace(strings.Split(link.Text(), "[videorecording]")[0]))
titles = append(titles, strings.TrimSpace(strings.Split(link.Text(), "[videorecording]")[0]))
}
return titles
}
func getMovieInfo() {
	title := "The Post"
	searchUrl := "https://en.wikipedia.org/w/api.php?action=opensearch&format=json&search="
	//searchRes := TitleSearchResult{}
	var model []interface{}
	if err := getJson(searchUrl + title, &model); err != nil {
		fmt.Println("err:", err)
		return
	}
	// fmt.Println("searchRes:", model)
	// the opensearch response is a JSON array: [query, [titles], [descriptions], [urls]]
	for _, x := range model {
		switch value := x.(type) {
		case string:
			fmt.Println(value)
		case []interface{}:
			for _, v := range value {
				fmt.Println(v.(string))
			}
		}
	}
}
var myClient = &http.Client{Timeout: 10 * time.Second}
func getJson(url string, target interface{}) error {
	r, err := myClient.Get(url)
	if err != nil {
		return err
	}
	defer r.Body.Close()
	// decode the response body directly into the caller-supplied target
	return json.NewDecoder(r.Body).Decode(target)
}
| 23.6 | 327 | 0.665254 | 3.09375 |
20f45f70e14928808cae9d38bd843ce4c03dd39d | 2,739 | lua | Lua | rot/rot/noise/simplex.lua | LJNIC/Collaborogue | 9c333e790f043087045d9236ccde0e496ebfc96b | [
"MIT"
] | 4 | 2020-11-09T23:33:48.000Z | 2021-04-19T23:14:50.000Z | rot/rot/noise/simplex.lua | LJNIC/Collaborogue | 9c333e790f043087045d9236ccde0e496ebfc96b | [
"MIT"
] | 1 | 2021-11-13T15:53:53.000Z | 2021-11-13T15:53:53.000Z | rot/rot/noise/simplex.lua | LJNIC/Collaborogue | 9c333e790f043087045d9236ccde0e496ebfc96b | [
"MIT"
] | 2 | 2021-11-11T03:25:30.000Z | 2022-02-13T13:05:00.000Z | --- Simplex Noise Generator.
-- Based on a simple 2d implementation of simplex noise by Ondrej Zara
-- Which is based on a speed-improved simplex noise algorithm for 2D, 3D and 4D in Java.
-- Which is based on example code by Stefan Gustavson ([email protected]).
-- With Optimisations by Peter Eastman ([email protected]).
-- Better rank ordering method by Stefan Gustavson in 2012.
-- @module ROT.Noise.Simplex
local ROT = require((...):gsub(('.[^./\\]*'):rep(2) .. '$', ''))
local Simplex = ROT.Noise:extend("Simplex")
--- Constructor.
-- 2D simplex noise generator.
-- @tparam int gradients The random values for the noise.
function Simplex:init(gradients)
self._F2 = .5 * (math.sqrt(3) - 1)
self._G2 = (3 - math.sqrt(3)) / 6
self._gradients = {
{ 0, - 1},
{ 1, - 1},
{ 1, 0},
{ 1, 1},
{ 0, 1},
{ - 1, 1},
{ - 1, 0},
{ - 1, - 1}
}
local permutations = {}
local count = gradients and gradients or 256
for i = 1, count do
table.insert(permutations, i)
end
permutations = table.randomize(permutations)
self._perms = {}
self._indexes = {}
for i = 1, 2 * count do
table.insert(self._perms, permutations[i%count + 1])
table.insert(self._indexes, self._perms[i] % #self._gradients + 1)
end
end
--- Get noise for a cell
-- Iterate over this function to retrieve noise values
-- @tparam int xin x-position of noise value
-- @tparam int yin y-position of noise value
function Simplex:get(xin, yin)
local perms = self._perms
local indexes = self._indexes
local count = #perms / 2
local G2 = self._G2
local n0, n1, n2, gi = 0, 0, 0
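    -- skew the input space to find the simplex cell (i, j) containing the point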
local s = (xin + yin) * self._F2
local i = math.floor(xin + s)
local j = math.floor(yin + s)
local t = (i + j) * G2
local X0 = i - t
local Y0 = j - t
local x0 = xin - X0
local y0 = yin - Y0
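    -- pick the second corner: which of the two triangles in the cell the point falls in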
local i1, j1
if x0 > y0 then
i1 = 1
j1 = 0
else
i1 = 0
j1 = 1
end
local x1 = x0 - i1 + G2
local y1 = y0 - j1 + G2
local x2 = x0 - 1 + 2 * G2
local y2 = y0 - 1 + 2 * G2
local ii = i%count + 1
local jj = j%count + 1
local t0 = .5 - x0 * x0 - y0 * y0
if t0 >= 0 then
t0 = t0 * t0
gi = indexes[ii + perms[jj]]
local grad = self._gradients[gi]
n0 = t0 * t0 * (grad[1] * x0 + grad[2] * y0)
end
local t1 = .5 - x1 * x1 - y1 * y1
if t1 >= 0 then
t1 = t1 * t1
gi = indexes[ii + i1 + perms[jj + j1]]
local grad = self._gradients[gi]
n1 = t1 * t1 * (grad[1] * x1 + grad[2] * y1)
end
local t2 = .5 - x2 * x2 - y2 * y2
if t2 >= 0 then
t2 = t2 * t2
gi = indexes[ii + 1 + perms[jj + 1]]
local grad = self._gradients[gi]
n2 = t2 * t2 * (grad[1] * x2 + grad[2] * y2)
end
return 70 * (n0 + n1 + n2)
end
return Simplex
| 25.12844 | 88 | 0.586345 | 3.328125 |
330bc2029c1246f778fe532317958ef2c30db80a | 10,719 | py | Python | touca/_case.py | trytouca/touca-python | dab4bb6760a173952b63ea14fd4bc30c3877744e | [
"Apache-2.0"
] | 11 | 2021-06-29T04:51:28.000Z | 2022-03-22T05:58:44.000Z | touca/_case.py | trytouca/touca-python | dab4bb6760a173952b63ea14fd4bc30c3877744e | [
"Apache-2.0"
] | null | null | null | touca/_case.py | trytouca/touca-python | dab4bb6760a173952b63ea14fd4bc30c3877744e | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Touca, Inc. Subject to Apache-2.0 License.
from ._types import IntegerType, VectorType, ToucaType
from datetime import datetime, timedelta
from enum import Enum
from typing import Dict, Tuple
class ResultCategory(Enum):
""" """
Check = 1
Assert = 2
class ResultEntry:
"""
Wrapper around a given ``ToucaType`` value that includes the category
it should belong to.
We are intentionally not using ``@dataclass`` to ensure the core library
has no dependency on ``dataclasses`` module. This may change in the future.
"""
def __init__(self, typ: ResultCategory, val: ToucaType):
"""
Creates an entry given its value and the category it should belong to.
:param typ: type of the entry
:param val: value of the entry
"""
self.typ = typ
self.val = val
class Case:
""" """
def __init__(self, **kwargs):
self._meta = kwargs
self._results: Dict[str, ResultEntry] = dict()
self._tics: Dict[str, datetime] = dict()
self._tocs: Dict[str, datetime] = dict()
def check(self, key: str, value: ToucaType):
"""
Logs a given value as a test result for the declared test case
and associates it with the specified key.
:param key: name to be associated with the logged test result
:param value: value to be logged as a test result
"""
self._results[key] = ResultEntry(typ=ResultCategory.Check, val=value)
def assume(self, key: str, value: ToucaType):
"""
Logs a given value as an assertion for the declared test case
and associates it with the specified key.
:param key: name to be associated with the logged test result
:param value: value to be logged as a test result
"""
self._results[key] = ResultEntry(typ=ResultCategory.Assert, val=value)
def add_array_element(self, key: str, value: ToucaType):
"""
Adds a given value to a list of results for the declared
test case which is associated with the specified key.
Could be considered as a helper utility function.
This method is particularly helpful to log a list of items as they
are found:
.. code-block:: python
for number in numbers:
if is_prime(number):
touca.add_array_element("prime numbers", number)
touca.add_hit_count("number of primes")
This pattern can be considered as a syntactic sugar for the following
alternative:
.. code-block:: python
primes = []
for number in numbers:
if is_prime(number):
primes.append(number)
if primes:
touca.check("prime numbers", primes)
touca.check("number of primes", len(primes))
The items added to the list are not required to be of the same type.
The following code is acceptable:
.. code-block:: python
touca.check("prime numbers", 42)
touca.check("prime numbers", "forty three")
:raises RuntimeError:
if specified key is already associated with
a test result which was not iterable
:param key: name to be associated with the logged test result
:param value: element to be appended to the array
:see also: :py:meth:`~check`
"""
if key not in self._results:
self._results[key] = ResultEntry(typ=ResultCategory.Check, val=VectorType())
vec = self._results.get(key)
if vec.typ is not ResultCategory.Check or not isinstance(vec.val, VectorType):
raise RuntimeError("specified key has a different type")
vec.val.add(value)
def add_hit_count(self, key: str):
"""
Increments value of key every time it is executed.
creates the key with initial value of one if it does not exist.
Could be considered as a helper utility function.
This method is particularly helpful to track variables whose values
are determined in loops with indeterminate execution cycles:
.. code-block:: python
for number in numbers:
if is_prime(number):
touca.add_array_element("prime numbers", number)
touca.add_hit_count("number of primes")
This pattern can be considered as a syntactic sugar for the following
alternative:
.. code-block:: python
primes = []
for number in numbers:
if is_prime(number):
primes.append(number)
if primes:
touca.check("prime numbers", primes)
touca.check("number of primes", len(primes))
:raises RuntimeError:
if specified key is already associated with
a test result which was not an integer
:param key: name to be associated with the logged test result
:see also: :py:meth:`~check`
"""
if key not in self._results:
self._results[key] = ResultEntry(
typ=ResultCategory.Check, val=IntegerType(1)
)
return
value = self._results.get(key)
if value.typ is not ResultCategory.Check or not isinstance(
value.val, IntegerType
):
raise RuntimeError("specified key has a different type")
value.val._value += 1
def add_metric(self, key: str, milliseconds: int):
"""
Adds an already obtained measurements to the list of captured
performance benchmarks.
Useful for logging a metric that is measured without using this SDK.
:param key: name to be associated with this performance benchmark
:param milliseconds: duration of this measurement in milliseconds
"""
value = datetime.now()
self._tics[key] = value
self._tocs[key] = value + timedelta(microseconds=milliseconds * 1000)
def start_timer(self, key: str):
"""
Starts timing an event with the specified name.
Measurement of the event is only complete when function
:py:meth:`~stop_timer` is later called for the specified name.
:param key: name to be associated with the performance metric
"""
self._tics[key] = datetime.now()
def stop_timer(self, key: str):
"""
Stops timing an event with the specified name.
Expects function :py:meth:`~start_timer` to have been called previously
with the specified name.
:param key: name to be associated with the performance metric
"""
if key in self._tics:
self._tocs[key] = datetime.now()
def _metrics(self) -> Tuple[str, ToucaType]:
for key, tic in self._tics.items():
if key not in self._tocs:
continue
diff = (self._tocs.get(key) - tic).microseconds / 1000
yield key, IntegerType(int(diff))
def _metadata(self) -> Dict[str, str]:
return {
"teamslug": self._meta.get("team") or "unknown",
"testsuite": self._meta.get("suite") or "unknown",
"version": self._meta.get("version") or "unknown",
"testcase": self._meta.get("name") or "unknown",
"builtAt": datetime.now().isoformat(),
}
def json(self):
return {
"metadata": self._metadata(),
"results": [
{"key": k, "value": v.val.json()}
for k, v in self._results.items()
if v.typ is ResultCategory.Check
],
"assertions": [
{"key": k, "value": v.val.json()}
for k, v in self._results.items()
if v.typ is ResultCategory.Assert
],
"metrics": [{"key": k, "value": v.json()} for k, v in self._metrics()],
}
def serialize(self) -> bytearray:
from flatbuffers import Builder
import touca._schema as schema
dicts = {
ResultCategory.Check: schema.ResultType.Check,
ResultCategory.Assert: schema.ResultType.Assert,
}
builder = Builder(1024)
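        # FlatBuffers are built bottom-up: strings and nested tables are created before the tables that reference them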
metadata = {k: builder.CreateString(v) for k, v in self._metadata().items()}
schema.MetadataStart(builder)
schema.MetadataAddTeamslug(builder, metadata.get("teamslug"))
schema.MetadataAddTestsuite(builder, metadata.get("testsuite"))
schema.MetadataAddVersion(builder, metadata.get("version"))
schema.MetadataAddTestcase(builder, metadata.get("testcase"))
schema.MetadataAddBuiltAt(builder, metadata.get("builtAt"))
fbs_metadata = schema.MetadataEnd(builder)
result_entries = []
for k, v in self._results.items():
fbs_key = Builder.CreateString(builder, k)
fbs_value = v.val.serialize(builder)
schema.ResultStart(builder)
schema.ResultAddKey(builder, fbs_key)
schema.ResultAddValue(builder, fbs_value)
schema.ResultAddTyp(builder, dicts.get(v.typ))
result_entries.append(schema.ResultEnd(builder))
schema.ResultsStartEntriesVector(builder, len(result_entries))
for item in reversed(result_entries):
builder.PrependUOffsetTRelative(item)
fbs_result_entries = builder.EndVector()
schema.ResultsStart(builder)
schema.ResultsAddEntries(builder, fbs_result_entries)
fbs_results = schema.ResultsEnd(builder)
metric_entries = []
for k, v in self._metrics():
fbs_key = Builder.CreateString(builder, k)
fbs_value = v.serialize(builder)
schema.MetricStart(builder)
schema.MetricAddKey(builder, fbs_key)
schema.MetricAddValue(builder, fbs_value)
metric_entries.append(schema.MetricEnd(builder))
schema.MetricsStartEntriesVector(builder, len(metric_entries))
for item in reversed(metric_entries):
builder.PrependUOffsetTRelative(item)
fbs_metric_entries = builder.EndVector()
schema.MetricsStart(builder)
schema.MetricsAddEntries(builder, fbs_metric_entries)
fbs_metrics = schema.MetricsEnd(builder)
schema.MessageStart(builder)
schema.MessageAddMetadata(builder, fbs_metadata)
schema.MessageAddResults(builder, fbs_results)
schema.MessageAddMetrics(builder, fbs_metrics)
fbs_message = schema.MessageEnd(builder)
builder.Finish(fbs_message)
return builder.Output()
| 35.376238 | 88 | 0.612184 | 3.21875 |
198f09ec70a864f74973126efad673b62438636e | 2,138 | lua | Lua | gateway/src/apicast/loader.lua | SpillChek2/apicast | 55011497b6a2badd1ca1097c4d5221a842d80faf | [
"Apache-2.0"
] | null | null | null | gateway/src/apicast/loader.lua | SpillChek2/apicast | 55011497b6a2badd1ca1097c4d5221a842d80faf | [
"Apache-2.0"
] | null | null | null | gateway/src/apicast/loader.lua | SpillChek2/apicast | 55011497b6a2badd1ca1097c4d5221a842d80faf | [
"Apache-2.0"
] | 1 | 2018-04-09T08:45:39.000Z | 2018-04-09T08:45:39.000Z | --- APIcast source loader
-- Loading this module will add a new source code loaders to package.searchers.
-- The searcher is going to print deprecation warnings when apicast source is loaded
-- through old or non prefixed paths.
-- We can rename files and set up an alias here so we don't break customer's code and
-- print a deprecation warning.
-- Another searcher is going to look for policies with `.policy` suffix.
-- Policies can be packaged as `some_name/policy.lua` so the directory also contains the JSON spec.
local loadfile = loadfile
local sub = string.sub
local policy_loader = require 'apicast.policy_loader'
local map = {
['apicast'] = 'apicast.policy.apicast'
}
local function loader(name, path)
local file, err = package.searchpath(name, path)
if file then
file, err = loadfile(file)
end
return file, err
end
--- Searcher has to return the loader or an error message.
local function policy_searcher(name)
if sub(name, 1, 15) == 'apicast.policy.' then
local mod = policy_loader:pcall(sub(name, 16), 'builtin')
if mod then return function () return mod end end
end
end
local function prefix_loader(name, path)
local prefixed = 'apicast.' .. name
local found, err = loader(prefixed, path)
if not found then
found = policy_searcher(prefixed)
end
if found then
ngx.log(ngx.STDERR, 'DEPRECATION: when loading apicast code use correct prefix: require("', prefixed, '")')
end
return found or err
end
local function rename_loader(name, path)
local new = map[name]
local found, err = policy_searcher(new)
if not found then
found = loader(new, path)
end
if found then
ngx.log(ngx.WARN, 'DEPRECATION: file renamed - change: require("', name, '")' ,' to: require("', new, '")')
end
return found or err
end
local function apicast_namespace(name)
local path = package.path
if not package.searchpath(name, path) then
if map[name] then
return rename_loader(name, path)
else
return prefix_loader(name, path)
end
end
end
table.insert(package.searchers, policy_searcher)
table.insert(package.searchers, apicast_namespace)
| 26.073171 | 111 | 0.717025 | 3.125 |
85cb68d2f275c7ab63531b3fa937fa3e509ed2a7 | 1,258 | c | C | sdk/openrtos/boot/fa626/lcd_clear.c | doyaGu/C0501Q_HWJL01 | 07a71328bd9038453cbb1cf9c276a3dd1e416d63 | [
"MIT"
] | 1 | 2021-10-09T08:05:50.000Z | 2021-10-09T08:05:50.000Z | sdk/openrtos/boot/fa626/lcd_clear.c | doyaGu/C0501Q_HWJL01 | 07a71328bd9038453cbb1cf9c276a3dd1e416d63 | [
"MIT"
] | null | null | null | sdk/openrtos/boot/fa626/lcd_clear.c | doyaGu/C0501Q_HWJL01 | 07a71328bd9038453cbb1cf9c276a3dd1e416d63 | [
"MIT"
] | null | null | null | #include "ite/ith.h"
#define REMAP_ADDR 0x80000000
// _start is default function name of entry point.
void _start(void)
{
uint32_t* ptr;
uint32_t size;
uint32_t color, i;
asm volatile("mcr p15, 0, %0, c7, c14, 0" : : "r"(0)); // clean and invalidate D-Cache all
asm volatile("mcr p15, 0, %0, c7, c5, 0" : : "r"(0)); // invalidate I-Cache all
ptr = (uint32_t*)(ithLcdGetBaseAddrA() + REMAP_ADDR);
size = ithLcdGetPitch() * ithLcdGetHeight();
#if CFG_LCD_BPP == 2
color = ITH_RGB565((CFG_LCD_BOOT_BGCOLOR >> 16) & 0xFF, (CFG_LCD_BOOT_BGCOLOR >> 8) & 0xFF, CFG_LCD_BOOT_BGCOLOR & 0xFF);
color |= color << 16;
#elif CFG_LCD_BPP == 4
color = CFG_LCD_BOOT_BGCOLOR;
#elif CFG_LCD_BPP == 0
#error "0 LCD BPP"
#else
#error "Unknown LCD BPP"
#endif
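    // fill the framebuffer with the background color, eight 32-bit words per iteration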
for (i = 0; i < size / (sizeof(uint32_t)*8); i++)
{
*ptr++ = color;
*ptr++ = color;
*ptr++ = color;
*ptr++ = color;
*ptr++ = color;
*ptr++ = color;
*ptr++ = color;
*ptr++ = color;
// FIXME: workaround for IT9850
#if (CFG_CHIP_FAMILY == 9850)
{
asm volatile("mcr p15, 0, %0, c7, c10, 4" : : "r"(0)); // sync (drain write buffer)
}
#endif
}
}
| 26.208333 | 125 | 0.556439 | 3.078125 |
0cc14f945ff11b1ec78d14d582d03623e82355fd | 4,657 | py | Python | tools/multiscale_shape.py | marvin-eisenberger/hamiltonian-interpolation | d18c2f401feffc672998c5fa1d50c1de03dba902 | [
"MIT"
] | 5 | 2021-01-05T23:16:55.000Z | 2021-07-23T12:26:06.000Z | tools/multiscale_shape.py | marvin-eisenberger/hamiltonian-interpolation | d18c2f401feffc672998c5fa1d50c1de03dba902 | [
"MIT"
] | null | null | null | tools/multiscale_shape.py | marvin-eisenberger/hamiltonian-interpolation | d18c2f401feffc672998c5fa1d50c1de03dba902 | [
"MIT"
] | 1 | 2021-02-22T08:31:05.000Z | 2021-02-22T08:31:05.000Z | import torch
from shape_utils import Shape, load_shape_pair, scatter_shape_pair
from torch_geometric.nn import knn
from param import *
from arap_potential import arap_vert
def load_multiscale_shapes(folder_path, file_name, scales, offset=0.5*torch.ones([3], device=device, dtype=torch.float32)):
"""Like 'load_shape_pair' but for shapes with different resolutions"""
vert_x_array = []
triv_x_array = []
vert_y_array = []
triv_y_array = []
for i_scale in range(len(scales)):
file_load = folder_path + "sub_" + str(scales[i_scale]) + "/" + file_name
shape_x, shape_y = load_shape_pair(file_load, offset)
vert_x_array.append(shape_x.vert)
vert_y_array.append(shape_y.vert)
triv_x_array.append(shape_x.triv)
triv_y_array.append(shape_y.triv)
shape_x = MultiscaleShape(vert_x_array, triv_x_array)
shape_y = MultiscaleShape(vert_y_array, triv_y_array)
return shape_x, shape_y
class MultiscaleShape(Shape):
"""Class for shapes with multiple resolutions.
Attributes beyond the base class 'Shape' are:
vert_array: List of vertices with different resolutions
triv_array: List of triangles with different resolutions
scale_idx: The index describing the current resolution --
The current vertices are vert_array[scale_idx]
ass_[array/vecs/weights]: attributes needed to apply an interpolation
on scale 'scale_idx' to the next resolution '(scale_idx+1)'
"""
def __init__(self, vert_array, triv_array):
super().__init__(vert_array[0], triv_array[0])
self.vert_array = vert_array
self.triv_array = triv_array
self.scale_idx = 0
self.scale_idx_len = len(vert_array)
self.ass_array = None
self.ass_vecs = None
self.ass_weights = None
self.init_upscale()
def set_scale_idx(self, scale_idx):
assert scale_idx >= 0 and scale_idx < self.scale_idx_len, "new index out of bounds"
self.vert_array[self.scale_idx] = self.vert
self.scale_idx = scale_idx
self.vert = self.vert_array[scale_idx]
self.triv = self.triv_array[scale_idx]
self.samples = list(range(self.vert.shape[0]))
self.neigh = None
def increase_scale_idx(self):
self.set_scale_idx(self.scale_idx+1)
def next_resolution(self):
return self.vert_array[self.scale_idx+1].shape
def init_upscale(self, num_knn=3):
self.ass_array = []
self.ass_vecs = []
self.ass_weights = []
for idx in range(self.scale_idx_len-1):
vert_i = self.vert_array[idx].to(device_cpu)
vert_ip1 = self.vert_array[idx+1].to(device_cpu)
ass_curr = knn(vert_i, vert_ip1, num_knn)
ass_curr = ass_curr[1, :].view(-1, num_knn)
self.ass_array.append(ass_curr.to(device)) #[n_vert_tp1, num_knn]
vec_curr = vert_ip1.unsqueeze(1) - vert_i[ass_curr, :]
self.ass_vecs.append(vec_curr.to(device)) #[n_vert_tp1, num_knn, 3]
weights_curr = 1/(torch.norm(vec_curr, dim=2, keepdim=True)+1e-5)
weights_curr = weights_curr / torch.sum(weights_curr, dim=1, keepdim=True)
self.ass_weights.append(weights_curr.to(device)) #[n_vert_tp1, num_knn, 1]
def apply_upsampling(self, vert_t):
R = arap_vert(vert_t, self.vert, self.get_neigh()) #[n_vert_tp1, 3, 3]
ass_curr = self.ass_array[self.scale_idx]
vec_curr = self.ass_vecs[self.scale_idx]
weights_curr = self.ass_weights[self.scale_idx]
vert_tp1 = vert_t[ass_curr, :] + torch.matmul(R[ass_curr], vec_curr.unsqueeze(3)).squeeze() #[n_vert_tp1, num_knn, 3]
vert_tp1 = torch.sum(weights_curr * vert_tp1, dim=1)
return vert_tp1
def rotate(self, R):
for i in range(self.scale_idx_len):
self.vert_array[i] = torch.mm(self.vert_array[i], R.transpose(0, 1))
self.vert = self.vert_array[self.scale_idx]
self.init_upscale()
def to_box(self, shape_y):
scale_idx = self.scale_idx
for i in range(self.scale_idx_len):
self.set_scale_idx(i)
shape_y.set_scale_idx(i)
super().to_box(shape_y)
self.set_scale_idx(scale_idx)
shape_y.set_scale_idx(scale_idx)
self.init_upscale()
def scale(self, factor, shift=True):
scale_idx = self.scale_idx
for i in range(self.scale_idx_len):
self.set_scale_idx(i)
super().scale(factor, shift)
self.set_scale_idx(scale_idx)
self.init_upscale()
if __name__ == "__main__":
print("main of multiscale_shape.py")
| 33.503597 | 126 | 0.665235 | 3.34375 |
f05bdaed59cf5073cab62db01710a16ba5ff7771 | 7,597 | py | Python | app/views.py | PaulMurrayCbr/GameNight | 838c19dda765027abbe8e12e331268b01cb859c2 | [
"Unlicense"
] | null | null | null | app/views.py | PaulMurrayCbr/GameNight | 838c19dda765027abbe8e12e331268b01cb859c2 | [
"Unlicense"
] | null | null | null | app/views.py | PaulMurrayCbr/GameNight | 838c19dda765027abbe8e12e331268b01cb859c2 | [
"Unlicense"
] | null | null | null | from app import app, db
from flask import render_template, flash, redirect, get_flashed_messages
import forms
import models
import Character
from flask.globals import request
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
def menugear() :
return {
'pcs': models.Character.query.all()
}
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html', menu=menugear())
@app.route('/whiteboard')
def whiteboard():
return render_template('whiteboard.html', menu=menugear())
@app.route('/pc/<name>/')
def character(name):
try:
pc = models.Character.query.filter_by(name=name).one()
updatepc_form=forms.PC(obj=pc)
newhp_form=forms.HP()
openhpbreakdown = False
states = get_flashed_messages(category_filter=['viewstate'])
if states:
for state in states:
if state['hpopen']:
openhpbreakdown = True
return render_template('pc.html',
updatepc_form=updatepc_form,
newhp_form = newhp_form,
pc=pc,
pcinfo=Character.buildInfo(pc),
menu=menugear(),
openhpbreakdown = openhpbreakdown)
except MultipleResultsFound, e:
flash(('Found multiple characters named %s' % name, 'danger'), 'msg')
pc = None
except NoResultFound, e:
flash(('PC %s not found' % name, 'warning'), 'msg')
pc = None
return redirect('/')
@app.route('/pc/<name>/update.do', methods=['POST'])
def do_updatepc(name):
try:
pc = models.Character.query.filter_by(name=name).one()
updatepc_form=forms.PC(obj=pc)
pc.abbrev = updatepc_form.abbrev.data
pc.name = updatepc_form.name.data
pc.pname = updatepc_form.pname.data
db.session.commit()
return redirect('/pc/%s' % pc.name)
except MultipleResultsFound, e:
flash(('Found multiple characters named %s' % name, 'danger'), 'msg')
pc = None
except NoResultFound, e:
flash(('PC %s not found' % name, 'warning'), 'msg')
pc = None
return redirect('/')
@app.route('/pc/<name>/addhptype.do', methods=['POST'])
def do_addhptypepc(name):
try:
pc = models.Character.query.filter_by(name=name).one()
newhp_form=forms.HP(obj=pc)
hp = models.Hp(
character_id = pc.id,
source = newhp_form.source.data,
max = newhp_form.max.data,
current = newhp_form.max.data,
ablative_only = newhp_form.ablative_only.data
)
db.session.add(hp)
db.session.commit()
flash({'hpopen':True}, 'viewstate')
return redirect('/pc/%s' % pc.name)
except MultipleResultsFound, e:
flash(('Found multiple characters named %s' % name, 'danger'), 'msg')
pc = None
except NoResultFound, e:
flash(('PC %s not found' % name, 'warning'), 'msg')
pc = None
return redirect('/')
@app.route('/pc/<name>/hp/<id>/set.do', methods=['GET', 'POST'])
def do_sethppc(name, id):
try:
pc = models.Character.query.filter_by(name=name).one()
hp = models.Hp.query.get(id)
if not hp:
flash(("HP %s not found" % id , 'danger'), 'msg')
elif hp.character_id != pc.id:
flash(("HP %s belongs to %s" % (id, hp.character.name) , 'danger'), 'msg')
else:
v = request.args.get('v', '')
if not v or v == '':
flash(("no new value specified" , 'warning'), 'msg')
else:
                try:
                    v = int(v)
                except ValueError, e:
                    flash(("'%s' does not appear to be a number" % v, 'warning'), 'msg')
                else:
                    hp.current = v
                    db.session.commit()
                    flash(("Set current to %d" % v , 'success'), 'msg')
flash({'hpopen':True}, 'viewstate')
return redirect('/pc/%s' % pc.name)
except MultipleResultsFound, e:
flash(('Found multiple characters named %s' % name, 'danger'), 'msg')
pc = None
except NoResultFound, e:
flash(('PC %s not found' % name, 'warning'), 'msg')
pc = None
return redirect('/')
@app.route('/pc/<name>/hp/<id>/max.do', methods=['GET', 'POST'])
def do_maxhppc(name, id):
try:
pc = models.Character.query.filter_by(name=name).one()
hp = models.Hp.query.get(id)
if not hp:
flash(("HP %s not found" % id , 'danger'), 'msg')
elif hp.character_id != pc.id:
flash(("HP %s belongs to %s" % (id, hp.character.name) , 'danger'), 'msg')
else:
v = request.args.get('v', '')
if not v or v == '':
flash(("no new value specified" , 'warning'), 'msg')
else:
                try:
                    v = int(v)
                except ValueError, e:
                    flash(("'%s' does not appear to be a number" % v, 'warning'), 'msg')
                else:
                    hp.max = v
                    db.session.commit()
                    flash(("Set max to %d" % v , 'success'), 'msg')
flash({'hpopen':True}, 'viewstate')
return redirect('/pc/%s' % pc.name)
except MultipleResultsFound, e:
flash(('Found multiple characters named %s' % name, 'danger'), 'msg')
pc = None
except NoResultFound, e:
flash(('PC %s not found' % name, 'warning'), 'msg')
pc = None
return redirect('/')
@app.route('/pc/<name>/hp/<id>/add.do', methods=['GET', 'POST'])
def do_addhppc(name, id):
try:
pc = models.Character.query.filter_by(name=name).one()
hp = models.Hp.query.get(id)
if not hp:
flash(("HP %s not found" % id , 'danger'), 'msg')
elif hp.character_id != pc.id:
flash(("HP %s belongs to %s" % (id, hp.character.name) , 'danger'), 'msg')
else:
v = request.args.get('v', '')
if not v or v == '':
flash(("no new value specified" , 'warning'), 'msg')
else:
                try:
                    v = int(v)
                except ValueError, e:
                    flash(("'%s' does not appear to be a number" % v, 'warning'), 'msg')
                else:
                    hp.current += v
                    db.session.commit()
                    if v < 0:
                        flash(("Subtracted %d" % -v , 'success'), 'msg')
                    else:
                        flash(("Added %d" % v , 'success'), 'msg')
flash({'hpopen':True}, 'viewstate')
return redirect('/pc/%s' % pc.name)
except MultipleResultsFound, e:
flash(('Found multiple characters named %s' % name, 'danger'), 'msg')
pc = None
except NoResultFound, e:
flash(('PC %s not found' % name, 'warning'), 'msg')
pc = None
return redirect('/')
@app.route('/pc/<name>/hp/<id>/zap.do', methods=['GET', 'POST'])
def do_zaphppc(name, id):
try:
pc = models.Character.query.filter_by(name=name).one()
hp = models.Hp.query.get(id)
if not hp:
flash(("HP %s not found" % id , 'danger'), 'msg')
elif hp.character_id != pc.id:
flash(("HP %s belongs to %s" % (id, hp.character.name) , 'danger'), 'msg')
else:
db.session.delete(hp)
db.session.commit()
flash(("Deleted" , 'success'), 'msg')
flash({'hpopen':True}, 'viewstate')
return redirect('/pc/%s' % pc.name)
except MultipleResultsFound, e:
flash(('Found multiple characters named %s' % name, 'danger'), 'msg')
pc = None
except NoResultFound, e:
flash(('PC %s not found' % name, 'warning'), 'msg')
pc = None
return redirect('/')
@app.route('/admin/pc/')
def adminpc():
pcs = models.Character.query.all()
return render_template('/admin/pcs.html',
pcs=pcs,
newpc_form=forms.PC(),
menu=menugear())
@app.route('/admin/pc/newpc.do', methods=['POST'])
def do_newpc():
form = forms.PC(request.form)
pc = models.Character(name=form.name.data, pname=form.pname.data, abbrev=form.abbrev.data)
db.session.add(pc)
db.session.commit()
flash(("New PC", 'success'), 'msg')
return redirect('/admin/pc/')
@app.route('/admin/pc/<id>/delete.do', methods=['GET'])
def do_deletepc(id):
pc = models.Character.query.get(id)
if not pc:
flash(("PC %s not found" % id , 'danger'), 'msg')
else :
db.session.delete(pc)
db.session.commit()
flash(("PC '%s' deleted" % pc.name , 'success'), 'msg')
return redirect('/admin/pc/')
| 26.939716 | 94 | 0.612874 | 3.03125 |
e73fffbbf34519bd85db2a58a307b41246e8a610 | 1,101 | js | JavaScript | src/0648.replace-words.648/0648.replace-words.648.js | jiangshanmeta/meta | 8f9d084cda91988d42208ac7a029612e9edc693b | [
"MIT"
] | 221 | 2018-10-26T07:05:12.000Z | 2022-03-30T03:23:10.000Z | src/0648.replace-words.648/0648.replace-words.648.js | ralap18/meta | 82d660a6eabb15e398a7dcc2a0fa99342143bb12 | [
"MIT"
] | 23 | 2018-09-24T14:50:58.000Z | 2020-09-17T14:23:45.000Z | src/0648.replace-words.648/0648.replace-words.648.js | ralap18/meta | 82d660a6eabb15e398a7dcc2a0fa99342143bb12 | [
"MIT"
] | 45 | 2019-03-29T03:36:19.000Z | 2022-03-25T20:57:13.000Z | /**
* @param {string[]} dict
* @param {string} sentence
* @return {string}
*/
// Trie (prefix tree)
function findRoot (trie, string) {
const list = [];
for (let i = 0; i < string.length; i++) {
if (!trie) {
break;
}
list.push(string[i]);
trie = trie[string[i]];
}
return trie === null ? list.join('') : '';
}
var replaceWords = function (dict, sentence) {
const trie = {};
    // If several dict words could serve as a root, the shortest must win, so sort by length ascending first
dict.sort((a, b) => a.length - b.length);
for (let i = 0; i < dict.length; i++) {
const string = dict[i];
        // Skip dict words that already have another (shorter) dict word as their root
if (findRoot(trie, string) === '') {
let prev = trie;
for (let j = 0; j < string.length - 1; j++) {
prev = prev[string[j]] || (prev[string[j]] = {});
}
            // Terminate the last letter with null so the end of a root can be detected in the trie
prev[string[string.length - 1]] = null;
}
}
return sentence.split(' ').map((string) => {
const root = findRoot(trie, string);
return root === '' ? string : root;
}).join(' ');
};
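// Example (illustrative): replaceWords(['cat', 'bat', 'rat'], 'the cattle was rattled by the battery')
// returns 'the cat was rat by the bat'.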
| 26.214286 | 65 | 0.482289 | 3.234375 |
7a97019e955f84d6270a6a63776af848923fd024 | 2,666 | rs | Rust | engine/src/math/vector4.rs | monadgroup/re19 | 80989ebf8ae2a3e203a443e583a7f359f0114333 | [
"Apache-2.0"
] | 47 | 2021-10-04T13:51:31.000Z | 2022-03-27T17:23:50.000Z | engine/src/math/vector4.rs | monadgroup/re19 | 80989ebf8ae2a3e203a443e583a7f359f0114333 | [
"Apache-2.0"
] | null | null | null | engine/src/math/vector4.rs | monadgroup/re19 | 80989ebf8ae2a3e203a443e583a7f359f0114333 | [
"Apache-2.0"
] | null | null | null | use super::{Float, Vector3};
use core::ops;
#[derive(Clone, Copy, PartialEq, PartialOrd, Default, Debug)]
#[repr(C)]
pub struct Vector4 {
pub x: f32,
pub y: f32,
pub z: f32,
pub w: f32,
}
impl Vector4 {
pub fn with_x(self, x: f32) -> Self {
Vector4 { x, ..self }
}
pub fn with_y(self, y: f32) -> Self {
Vector4 { y, ..self }
}
pub fn with_z(self, z: f32) -> Self {
Vector4 { z, ..self }
}
pub fn with_w(self, w: f32) -> Self {
Vector4 { w, ..self }
}
pub fn dot(self, other: Vector4) -> f32 {
self.x * other.x + self.y * other.y + self.z * other.z + self.w * other.w
}
pub fn length_squared(self) -> f32 {
self.dot(self)
}
pub fn length(self) -> f32 {
self.length_squared().sqrt()
}
pub fn unit(self) -> Vector4 {
self / Vector4::from(self.length())
}
pub fn lerp(self, other: Vector4, t: f32) -> Vector4 {
self + (other - self) * t
}
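    /// Performs the perspective divide, dropping the w component: for example,
    /// unprojecting (2.0, 4.0, 6.0, 2.0) yields the Vector3 (1.0, 2.0, 3.0).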
pub fn unproject(self) -> Vector3 {
Vector3 {
x: self.x / self.w,
y: self.y / self.w,
z: self.z / self.w,
}
}
pub fn as_vec3(self) -> Vector3 {
Vector3 {
x: self.x,
y: self.y,
z: self.z,
}
}
pub fn floor(self) -> Vector4 {
Vector4 {
x: self.x.floor(),
y: self.y.floor(),
z: self.z.floor(),
w: self.w.floor(),
}
}
pub fn fract(self) -> Vector4 {
Vector4 {
x: self.x.fract(),
y: self.y.fract(),
z: self.z.fract(),
w: self.w.fract(),
}
}
}
define_vec!(Vector4 => (x, y, z, w));
impl ops::Neg for Vector4 {
type Output = Vector4;
fn neg(self) -> Vector4 {
Vector4 {
x: -self.x,
y: -self.y,
z: -self.z,
w: -self.w,
}
}
}
impl From<[f32; 4]> for Vector4 {
fn from([x, y, z, w]: [f32; 4]) -> Self {
Vector4 { x, y, z, w }
}
}
impl From<(f32, f32, f32, f32)> for Vector4 {
fn from((x, y, z, w): (f32, f32, f32, f32)) -> Self {
Vector4 { x, y, z, w }
}
}
impl From<f32> for Vector4 {
fn from(val: f32) -> Self {
Vector4 {
x: val,
y: val,
z: val,
w: val,
}
}
}
impl Into<[f32; 4]> for Vector4 {
fn into(self) -> [f32; 4] {
[self.x, self.y, self.z, self.w]
}
}
impl Into<(f32, f32, f32, f32)> for Vector4 {
fn into(self) -> (f32, f32, f32, f32) {
(self.x, self.y, self.z, self.w)
}
}
| 19.895522 | 81 | 0.445236 | 3.078125 |
7541b62a34467e2119df5125dde81063db36ce24 | 2,467 | rs | Rust | core/src/eval/arithmetic.rs | contractshark/rust-cevm | 35cdefb760d41197ccfadc8c446343f20eba9080 | [
"Apache-2.0"
] | 47 | 2020-08-01T19:50:19.000Z | 2022-03-29T16:23:40.000Z | core/src/eval/arithmetic.rs | gakonst/rust-cevm | 35cdefb760d41197ccfadc8c446343f20eba9080 | [
"Apache-2.0"
] | null | null | null | core/src/eval/arithmetic.rs | gakonst/rust-cevm | 35cdefb760d41197ccfadc8c446343f20eba9080 | [
"Apache-2.0"
] | 4 | 2020-12-30T06:43:30.000Z | 2021-09-08T11:41:14.000Z | use crate::utils::I256;
use core::convert::TryInto;
use core::ops::Rem;
use primitive_types::{U256, U512};
pub fn div(op1: U256, op2: U256) -> U256 {
if op2 == U256::zero() {
U256::zero()
} else {
op1 / op2
}
}
pub fn sdiv(op1: U256, op2: U256) -> U256 {
let op1: I256 = op1.into();
let op2: I256 = op2.into();
let ret = op1 / op2;
ret.into()
}
pub fn rem(op1: U256, op2: U256) -> U256 {
if op2 == U256::zero() {
U256::zero()
} else {
op1.rem(op2)
}
}
pub fn srem(op1: U256, op2: U256) -> U256 {
if op2 == U256::zero() {
U256::zero()
} else {
let op1: I256 = op1.into();
let op2: I256 = op2.into();
let ret = op1.rem(op2);
ret.into()
}
}
pub fn addmod(op1: U256, op2: U256, op3: U256) -> U256 {
let op1: U512 = op1.into();
let op2: U512 = op2.into();
let op3: U512 = op3.into();
if op3 == U512::zero() {
U256::zero()
} else {
let v = (op1 + op2) % op3;
v.try_into()
.expect("op3 is less than U256::max_value(), thus it never overflows; qed")
}
}
pub fn mulmod(op1: U256, op2: U256, op3: U256) -> U256 {
let op1: U512 = op1.into();
let op2: U512 = op2.into();
let op3: U512 = op3.into();
if op3 == U512::zero() {
U256::zero()
} else {
let v = (op1 * op2) % op3;
v.try_into()
.expect("op3 is less than U256::max_value(), thus it never overflows; qed")
}
}
pub fn exp(op1: U256, op2: U256) -> U256 {
let mut op1 = op1;
let mut op2 = op2;
let mut r: U256 = 1.into();
while op2 != 0.into() {
if op2 & 1.into() != 0.into() {
r = r.overflowing_mul(op1).0;
}
op2 >>= 1;
op1 = op1.overflowing_mul(op1).0;
}
r
}
pub fn signextend(op1: U256, op2: U256) -> U256 {
if op1 > U256::from(32) {
op2
} else {
let mut ret = U256::zero();
let len: usize = op1.as_usize();
let t: usize = 8 * (len + 1) - 1;
let t_bit_mask = U256::one() << t;
let t_value = (op2 & t_bit_mask) >> t;
for i in 0..256 {
let bit_mask = U256::one() << i;
let i_value = (op2 & bit_mask) >> i;
if i <= t {
ret = ret.overflowing_add(i_value << i).0;
} else {
ret = ret.overflowing_add(t_value << i).0;
}
}
ret
}
}
| 23.495238 | 87 | 0.478719 | 3.21875 |
7f1887ad8207d20aadee5914b8aa3df9a823555d | 5,156 | go | Go | request/queue.go | KillianMeersman/wander | e8fab5fd01aad4be4cf835a6c6447e2491a51bc1 | [
"MIT"
] | null | null | null | request/queue.go | KillianMeersman/wander | e8fab5fd01aad4be4cf835a6c6447e2491a51bc1 | [
"MIT"
] | null | null | null | request/queue.go | KillianMeersman/wander | e8fab5fd01aad4be4cf835a6c6447e2491a51bc1 | [
"MIT"
] | null | null | null | package request
import (
"fmt"
"io"
"sync"
)
type QueueResult struct {
Error error
Request *Request
}
// Queue is a prioritized FIFO queue for requests
type Queue interface {
io.Closer
// Enqueue adds the request to the queue, returns an error if no more space is available.
Enqueue(req *Request, priority int) error
// Dequeue pops the highest priority request from the queue.
Dequeue() <-chan QueueResult
// Count returns the amount of queued requests.
Count() (int, error)
Clear()
}
// QueueMaxSize signals the Queue has reached its maximum size.
type QueueMaxSize struct {
size int
}
func (r QueueMaxSize) Error() string {
return fmt.Sprintf("Request queue has reached maximum size of %d", r.size)
}
type heapNode struct {
priority int
insertionCount int
request *Request
}
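// less reports whether a ranks below b in the max-heap: a lower priority loses,
// and between equal priorities the later insertion (higher insertionCount)
// loses, keeping the queue FIFO within a priority level.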
func less(a, b heapNode) bool {
if a.priority < b.priority {
return true
}
if a.priority == b.priority {
if a.insertionCount > b.insertionCount {
return true
}
}
return false
}
// RequestHeapQueue is a heap implementation for request.Queue.
type RequestHeapQueue struct {
data []heapNode
count int
maxSize int
insertionCount int
lock *sync.Mutex
waitCondition *sync.Cond
waitGroup *sync.WaitGroup
isDone bool
}
// NewRequestHeap returns a request heap (priority queue).
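// A minimal usage sketch (illustrative only; error handling elided):
//
//	q := NewRequestHeap(1024)
//	_ = q.Enqueue(req, 10) // req is an existing *Request
//	res := <-q.Dequeue()   // receives the highest-priority request
//	_ = res.Request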
func NewRequestHeap(maxSize int) *RequestHeapQueue {
lock := &sync.Mutex{}
heap := &RequestHeapQueue{
data: make([]heapNode, maxSize/10),
maxSize: maxSize,
lock: lock,
waitCondition: sync.NewCond(lock),
waitGroup: &sync.WaitGroup{},
isDone: false,
}
return heap
}
// BuildHeap builds a request heap from existing data.
func BuildHeap(data []heapNode, maxSize int) *RequestHeapQueue {
heap := NewRequestHeap(maxSize)
for i := len(data) / 2; i >= 0; i-- {
heap.maxHeapify(i)
}
return heap
}
// Enqueue a request with the given priority.
func (r *RequestHeapQueue) Enqueue(req *Request, priority int) error {
r.lock.Lock()
defer r.lock.Unlock()
return r.insert(req, priority)
}
func (r *RequestHeapQueue) Dequeue() <-chan QueueResult {
outlet := make(chan QueueResult)
go func() {
r.waitGroup.Add(1)
r.waitCondition.L.Lock()
// wait untl an item is available or Close is called
for r.count < 1 && !r.isDone {
r.waitCondition.Wait()
}
if r.isDone {
r.waitCondition.L.Unlock()
} else {
req := r.extract()
r.waitCondition.L.Unlock()
outlet <- QueueResult{
Request: req,
}
}
r.waitGroup.Done()
}()
return outlet
}
func (r *RequestHeapQueue) Close() error {
r.isDone = true
r.waitCondition.Broadcast()
r.waitGroup.Wait()
return nil
}
func (r *RequestHeapQueue) Clear() {
	r.lock.Lock()
	defer r.lock.Unlock()
	for i := range r.data {
		r.data[i] = heapNode{}
	}
	r.count = 0
}
// Count returns the amount of requests in the queue.
func (r *RequestHeapQueue) Count() (int, error) {
return r.count, nil
}
// insert a request.
func (r *RequestHeapQueue) insert(req *Request, priority int) error {
node := heapNode{
priority: priority,
request: req,
insertionCount: r.insertionCount + 1,
}
if r.count >= len(r.data) {
newSize := (len(r.data) * 2) + 1
if newSize > r.maxSize {
if r.count == r.maxSize {
return &QueueMaxSize{size: r.maxSize}
}
newSize = r.maxSize
}
data := make([]heapNode, newSize)
copy(data, r.data)
r.data = data
}
i := r.count
parent := parentIndex(i)
r.data[i] = node
for i > 0 && r.data[i].priority > r.data[parent].priority {
r.data[i], r.data[parent] = r.data[parent], r.data[i]
i = parentIndex(i)
parent = parentIndex(i)
}
r.count++
r.insertionCount++
r.waitCondition.Signal()
return nil
}
// extract the root node and replace it with the last element, then sift down.
func (r *RequestHeapQueue) extract() *Request {
req := r.data[0].request
r.count--
r.data[0] = r.data[r.count]
r.maxHeapify(0)
return req
}
// Sort the heap so that the highest priority request is the root node
// Starts from i (array index) and sifts down, swapping nodes as nescesary along the way
func (r *RequestHeapQueue) maxHeapify(i int) {
max := i
for {
// get the children and set the current max value to the starting node
left := leftChildIndex(i)
right := rightChildIndex(i)
// if left child is not the last node and is less than the parent node, set max to this node index
if left < r.count && less(r.data[max], r.data[left]) {
max = left
}
// same thing, but with right child
if right < r.count && less(r.data[max], r.data[right]) {
max = right
}
// stop sifting if no swap occured, the heap is sorted
if max == i {
return
}
// if a swap occured, swap the actual data and continue sifting into the next node
r.data[i], r.data[max] = r.data[max], r.data[i]
i = max
}
}
// get the index of the left child node
func leftChildIndex(i int) int {
return (i * 2) + 1
}
// get the index of the right child node
func rightChildIndex(i int) int {
return (i * 2) + 2
}
// get the index of the parent node
func parentIndex(i int) int {
parent := ((i + 1) / 2) - 1
if parent < 0 {
return 0
}
return parent
}
| 21.663866 | 100 | 0.662917 | 3.328125 |
b4e0e572c835c484c0f44eb853b3fc2721f8cb63 | 1,033 | kt | Kotlin | roboquant-core/test/feeds/TestFeedTest.kt | jetonbacaj/roboquant | 11136be3cb62870532dc7e1888c5ea88ba17bcab | [
"Apache-2.0"
] | null | null | null | roboquant-core/test/feeds/TestFeedTest.kt | jetonbacaj/roboquant | 11136be3cb62870532dc7e1888c5ea88ba17bcab | [
"Apache-2.0"
] | null | null | null | roboquant-core/test/feeds/TestFeedTest.kt | jetonbacaj/roboquant | 11136be3cb62870532dc7e1888c5ea88ba17bcab | [
"Apache-2.0"
] | null | null | null | package org.roboquant.feeds
import kotlinx.coroutines.*
import org.junit.Test
import kotlin.test.*
import org.roboquant.TestData
import org.roboquant.common.Background
import org.roboquant.common.TimeFrame
import org.roboquant.feeds.test.TestFeed
fun play(feed:Feed, timeFrame: TimeFrame = TimeFrame.FULL): EventChannel {
val channel = EventChannel(timeFrame = timeFrame)
Background.ioJob {
feed.play(channel)
channel.close()
}
return channel
}
internal class TestFeedTest {
@Test
fun testTestFeed() = runBlocking{
val feed = TestFeed(5..9)
var cnt = 0
for (step in play(feed)) {
cnt++
}
assertEquals(5, cnt)
}
@Test
fun testTestFeedWithItems() = runBlocking{
val feed = TestFeed(120..130, 130 downTo 120, asset = TestData.euStock())
var cnt = 0
for (step in play(feed)) {
cnt++
assertTrue(step.actions.first() is PriceAction)
}
assertEquals(22, cnt)
}
} | 23.477273 | 81 | 0.631171 | 3.140625 |
af6f779fbe9f9d336e37d83d5ee4446277505939 | 1,614 | rb | Ruby | features/step_definitions/work_unit_steps.rb | AdGitHub2023/xrono | 1cdaebe285ee6115e948c7c0a43f34e2393d4b4a | [
"MIT"
] | 11 | 2015-03-16T14:45:03.000Z | 2019-02-09T08:03:41.000Z | features/step_definitions/work_unit_steps.rb | AdGitHub2023/xrono | 1cdaebe285ee6115e948c7c0a43f34e2393d4b4a | [
"MIT"
] | 3 | 2015-03-11T21:29:36.000Z | 2018-06-26T17:38:05.000Z | features/step_definitions/work_unit_steps.rb | AdGitHub2023/xrono | 1cdaebe285ee6115e948c7c0a43f34e2393d4b4a | [
"MIT"
] | 2 | 2015-12-13T17:33:51.000Z | 2019-07-21T20:20:18.000Z | Given /^I have (?:a|an) "([^\"]*)" work unit scheduled today for "([^\"]*)" hours$/ do |hours_type, hours|
WorkUnit.make(:hours_type => hours_type, :scheduled_at => Date.current, :user => @current_user, :hours => hours)
end
Then /^I should see the following work_units:$/ do |expected_work_units_table|
expected_work_units_table.diff!(find('table').all('tr').map { |row| row.all('th, td').map { |cell| cell.text.strip } })
end
When /^I create a work unit with #{capture_model}$/ do |ticket|
WorkUnit.make(:ticket => find_model!(ticket))
end
Given /^I have no work units for the previous day$/ do
@current_user.work_units.where(:scheduled_at => Date.yesterday).destroy_all
end
Given /^I have a "([^"]*)" hour work unit for yesterday with ticket "([^"]*)"$/ do |hours, ticket|
WorkUnit.make(:ticket => find_model!(ticket), :hours_type => "Normal",
:scheduled_at => 1.days.ago.beginning_of_day, :user => @current_user, :hours => hours)
end
Then /^that work unit should still have a scheduled at date of yesterday$/ do
WorkUnit.last.scheduled_at.should == 1.day.ago.beginning_of_day
end
Then /^I should see the new ticket fields$/ do
within("#on_demand_ticket") do
page.should have_css('#on_demand_ticket_name')
page.should have_css('#on_demand_ticket_description')
page.should have_css('#on_demand_ticket_estimated_hours')
end
end
Then /^there should be a ticket named "([^"]*)" with (\d+) hours$/ do |ticket_name, hours|
sleep(1)
@ticket = Ticket.where(:name => ticket_name).last
@ticket.should_not be_nil
@ticket.work_units.last.hours.should == BigDecimal(hours)
end
| 40.35 | 121 | 0.703222 | 3.0625 |
0ff2a8b6d1a72c706df715b01c61d59991920bb5 | 10,259 | kt | Kotlin | baseLib/src/main/java/com/gas/ext/file/ZipExt.kt | alinainai/MvvmJetpack | 7a7d524ec6d5adb013e4aa83bc6e01ec689c435a | [
"Apache-2.0"
] | null | null | null | baseLib/src/main/java/com/gas/ext/file/ZipExt.kt | alinainai/MvvmJetpack | 7a7d524ec6d5adb013e4aa83bc6e01ec689c435a | [
"Apache-2.0"
] | null | null | null | baseLib/src/main/java/com/gas/ext/file/ZipExt.kt | alinainai/MvvmJetpack | 7a7d524ec6d5adb013e4aa83bc6e01ec689c435a | [
"Apache-2.0"
] | null | null | null | package com.gas.ext.file
import android.util.Log
import com.gas.ext.app.debug
import com.gas.ext.io.toFile
import com.gas.ext.isSpace
import java.io.*
import java.util.*
import java.util.zip.ZipEntry
import java.util.zip.ZipException
import java.util.zip.ZipFile
import java.util.zip.ZipOutputStream
/**
* ================================================
 * zipFiles          : compress multiple files into one ZIP
 * zipFile           : compress a single file or directory
 * unzipFile         : extract a ZIP file
 * unzipFileByKeyword: extract entries whose names contain a keyword
 * getFilesPath      : get the list of file paths inside a ZIP
 * getComments       : get the list of entry comments inside a ZIP
* ================================================
*/
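// A minimal usage sketch (illustrative; the paths are hypothetical):
//
//     zipFile("/sdcard/docs", "/sdcard/docs.zip")
//     unzipFileByKeyword("/sdcard/docs.zip", "/sdcard/out") // null keyword extracts everything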
private const val BUFFER_LEN = 8192
/**
* Zip the files.
*
* @param srcFilePaths The paths of source files.
* @param zipFilePath The path of ZIP file.
* @param comment The comment.
* @return `true`: success<br></br>`false`: fail
* @throws IOException if an I/O error has occurred
*/
@Throws(IOException::class)
fun zipFiles(srcFilePaths: Collection<String>, zipFilePath: String, comment: String?=null): Boolean {
var zos: ZipOutputStream? = null
return try {
zos = ZipOutputStream(FileOutputStream(zipFilePath))
for (srcFile in srcFilePaths) {
if (!zipFile(srcFile.pathToFile(), "", zos, comment)) return false
}
true
} finally {
zos?.let {
it.finish()
it.close()
}
}
}
/**
* Zip the files.
*
* @param srcFiles The source of files.
* @param zipFile The ZIP file.
* @return `true`: success<br></br>`false`: fail
* @throws IOException if an I/O error has occurred
*/
@JvmOverloads
@Throws(IOException::class)
fun zipFiles(srcFiles: Collection<File?>, zipFile: File, comment: String? = null): Boolean {
var zos: ZipOutputStream? = null
return try {
zos = ZipOutputStream(FileOutputStream(zipFile))
for (srcFile in srcFiles) {
if (!zipFile(srcFile, "", zos, comment)) return false
}
true
} finally {
if (zos != null) {
zos.finish()
zos.close()
}
}
}
/**
* Zip the file.
*
* @param srcFilePath The path of source file.
* @param zipFilePath The path of ZIP file.
* @param comment The comment.
* @return `true`: success<br></br>`false`: fail
* @throws IOException if an I/O error has occurred
*/
@Throws(IOException::class)
fun zipFile(srcFilePath: String, zipFilePath: String, comment: String?=null): Boolean {
return zipFile(srcFilePath.pathToFile() ,zipFilePath.pathToFile(), comment)
}
/**
* Zip the file.
*
* @param srcFile The source of file.
* @param zipFile The ZIP file.
* @return `true`: success<br></br>`false`: fail
* @throws IOException if an I/O error has occurred
*/
@JvmOverloads
@Throws(IOException::class)
fun zipFile(srcFile: File?, zipFile: File?, comment: String? = null): Boolean {
if (srcFile == null || zipFile == null) return false
var zos: ZipOutputStream? = null
return try {
zos = ZipOutputStream(FileOutputStream(zipFile))
zipFile(srcFile, "", zos, comment)
} finally {
zos?.close()
}
}
@Throws(IOException::class)
private fun zipFile(srcFile: File?, rootPath: String, zos: ZipOutputStream, comment: String?): Boolean {
val rootPathNx = rootPath + (if (rootPath.isSpace()) "" else File.separator) + srcFile!!.name
if (srcFile.isDirectory) {
val fileList = srcFile.listFiles()
if (fileList == null || fileList.isEmpty()) {
val entry = ZipEntry("$rootPathNx/")
entry.comment = comment
zos.putNextEntry(entry)
zos.closeEntry()
} else {
for (file in fileList) {
if (!zipFile(file, rootPathNx, zos, comment)) return false
}
}
} else {
var input: InputStream? = null
try {
input = BufferedInputStream(FileInputStream(srcFile))
            val entry = ZipEntry(rootPathNx)
entry.comment = comment
zos.putNextEntry(entry)
val buffer = ByteArray(BUFFER_LEN)
var len: Int
while (input.read(buffer, 0, BUFFER_LEN).also { len = it } != -1) {
zos.write(buffer, 0, len)
}
zos.closeEntry()
} finally {
input?.close()
}
}
return true
}
/**
* Unzip the file by keyword.
*
* @param zipFilePath The path of ZIP file.
* @param destDirPath The path of destination directory.
* @param keyword The keyboard.
* @return the unzipped files
* @throws IOException if unzip unsuccessfully
*/
@Throws(IOException::class)
fun unzipFileByKeyword(zipFilePath: String, destDirPath: String, keyword: String?=null): List<File>? {
return unzipFileByKeyword(zipFilePath.pathToFile(), destDirPath.pathToFile(), keyword)
}
/**
* Unzip the file by keyword.
*
* @param zipFile The ZIP file.
* @param destDir The destination directory.
* @param keyword The keyboard.
* @return the unzipped files
* @throws IOException if unzip unsuccessfully
*/
@Throws(IOException::class)
fun unzipFileByKeyword(zipFile: File?, destDir: File?, keyword: String?=null): List<File>? {
if (zipFile == null || destDir == null) return null
val files: MutableList<File> = ArrayList()
val zip = ZipFile(zipFile)
val entries: Enumeration<*> = zip.entries()
zip.use { zipClose ->
if (keyword.isNullOrEmpty()||keyword.isSpace()) {
while (entries.hasMoreElements()) {
val entry = entries.nextElement() as ZipEntry
val entryName = entry.name.replace("\\", "/")
if (entryName.contains("../")) {
Log.d("ZipUtils", "entryName: $entryName is dangerous!")
continue
}
if (!unzipChildFile(destDir, files, zipClose, entry, entryName)) return files
}
} else {
while (entries.hasMoreElements()) {
val entry = entries.nextElement() as ZipEntry
val entryName = entry.name.replace("\\", "/")
if (entryName.contains("../")) {
Log.d( "ZipUtils","entryName: $entryName is dangerous!")
continue
}
if (entryName.contains(keyword)) {
if (!unzipChildFile(destDir, files, zipClose, entry, entryName)) return files
}
}
}
}
return files
}
@Throws(IOException::class)
private fun unzipChildFile(destDir: File, files: MutableList<File>, zip: ZipFile, entry: ZipEntry, name: String): Boolean {
val file = File(destDir, name)
files.add(file)
if (entry.isDirectory) {
return file.existOrCreateDir()
} else {
        if (!file.existOrCreateFile()) return false
var input: InputStream? = null
var out: OutputStream? = null
try {
input = BufferedInputStream(zip.getInputStream(entry))
out = BufferedOutputStream(FileOutputStream(file))
val buffer = ByteArray(BUFFER_LEN)
var len: Int
while (input.read(buffer).also { len = it } != -1) {
out.write(buffer, 0, len)
}
} finally {
input?.close()
out?.close()
}
}
return true
}
/**
* Return the files' path in ZIP file.
*
* @param zipFilePath The path of ZIP file.
* @return the files' path in ZIP file
* @throws IOException if an I/O error has occurred
*/
@Throws(IOException::class)
fun getFilesPath(zipFilePath: String): List<String>? {
return getFilesPath(zipFilePath.pathToFile())
}
/**
* Return the files' path in ZIP file.
*
* @param zipFile The ZIP file.
* @return the files' path in ZIP file
* @throws IOException if an I/O error has occurred
*/
@Throws(IOException::class)
fun getFilesPath(zipFile: File?): List<String>? {
if (zipFile == null) return null
val paths: MutableList<String> = ArrayList()
val zip = ZipFile(zipFile)
val entries: Enumeration<*> = zip.entries()
while (entries.hasMoreElements()) {
val entryName = (entries.nextElement() as ZipEntry).name.replace("\\", "/")
if (entryName.contains("../")) {
Log.e("ZipUtils", "entryName: $entryName is dangerous!")
paths.add(entryName)
} else {
paths.add(entryName)
}
}
zip.close()
return paths
}
/**
* Return the files' comment in ZIP file.
*
* @param zipFilePath The path of ZIP file.
* @return the files' comment in ZIP file
* @throws IOException if an I/O error has occurred
*/
@Throws(IOException::class)
fun getComments(zipFilePath: String): List<String>? {
return getComments(zipFilePath.pathToFile())
}
/**
* Return the files' comment in ZIP file.
*
* @param zipFile The ZIP file.
* @return the files' comment in ZIP file
* @throws IOException if an I/O error has occurred
*/
@Throws(IOException::class)
fun getComments(zipFile: File?): List<String>? {
if (zipFile == null) return null
val comments: MutableList<String> = ArrayList()
val zip = ZipFile(zipFile)
val entries: Enumeration<*> = zip.entries()
while (entries.hasMoreElements()) {
val entry = entries.nextElement() as ZipEntry
comments.add(entry.comment)
}
zip.close()
return comments
}
/**
 * Extracts this ZIP file into the given directory.
 * @receiver the ZIP file to extract
 * @param folderPath the target directory to extract into
 * @throws IOException thrown when an error occurs during extraction
*/
@Throws(ZipException::class, IOException::class)
fun File.unzipFile(folderPath: String) {
File(folderPath).ensureFolder()
val zf = ZipFile(this)
val entries = zf.entries()
while (entries.hasMoreElements()) {
val entry = entries.nextElement()
if (entry.isDirectory) {
continue
}
zf.getInputStream(entry).toFile(File(folderPath + File.separator + entry.name))
    }
    zf.close()
}
/**
 * Extracts this ZIP file; the archive itself is deleted afterwards, even if
 * extraction fails, in which case the exception is still thrown to the caller.
 * @receiver the ZIP file to extract
 * @param folderPath the target directory to extract into
 * @throws IOException thrown when an error occurs during extraction
*/
@Throws(ZipException::class, IOException::class)
fun File.unzipAndSafeDelete(folderPath: String) {
try {
unzipFile(folderPath)
} catch (t: Throwable) {
throw t
} finally {
safeDelete()
}
}
| 29.736232 | 123 | 0.610001 | 3.0625 |
8599ffe35407070cd185c4dc33fdccea1859108b | 3,269 | js | JavaScript | pages/product.js | DCKT/next-blog | 85d0cd455685929f841c07dfad8d6abf5af99780 | [
"MIT"
] | null | null | null | pages/product.js | DCKT/next-blog | 85d0cd455685929f841c07dfad8d6abf5af99780 | [
"MIT"
] | 14 | 2020-06-11T05:36:12.000Z | 2022-03-15T20:23:36.000Z | pages/product.js | DCKT/next-blog | 85d0cd455685929f841c07dfad8d6abf5af99780 | [
"MIT"
] | null | null | null | // @flow
import React from 'react'
import Layout from '../components/Layout'
import * as Moltin from '../utils/js/moltin'
import Button from '../components/Button'
import { addProduct } from '../actions/cart'
import type { TMoltinProduct, TMoltinImage } from '../utils/js/types'
import classNames from 'classnames'
import providerConnect from '../components/_Provider'
type Props = {
product: TMoltinProduct,
dispatch: () => any,
isServer: boolean,
initialState: Object
}
type State = {
  currentPicture: ?TMoltinImage,
isLoading: boolean
}
class ProductDetails extends React.Component {
props: Props
state: State
store: Object
static async getInitialProps ({ query, req }) {
const id = query.id || query.slug.split('_')[1]
const product = await Moltin.fetchProduct(id)
return { product }
}
componentDidMount () {
this.setState({ currentPicture: this.props.product.images[0] })
}
componentDidUpdate (prevProps, prevState) {
if (prevProps.product.id !== this.props.product.id) {
this.setState({ currentPicture: this.props.product.images[0] }) // eslint-disable-line react/no-did-update-set-state
}
}
constructor (props: Props) {
super(props)
this.state = {
currentPicture: null,
isLoading: false
}
}
render () {
const { product } = this.props
const { currentPicture, isLoading } = this.state
const { title, description, brand, images } = product
const addButtonCartClassName = classNames({
'is-loading': isLoading
})
return product ? (
<Layout title={title}>
<div className='container'>
<div className='columns'>
<div className='column is-half'>
<div>
{
currentPicture ? <img src={currentPicture.url.http} alt={currentPicture.name} /> : null
}
</div>
<div className='columns'>
{
images.map(this._renderPictures)
}
</div>
</div>
<div className='column is-half'>
<section className='section'>
<div className='heading'>
<h1 className='title'>{ title }</h1>
<h2 className='subtitle'>{ brand.value }</h2>
</div>
<p className='content'>
{ description }
</p>
<div>
<Button type='primary' onClick={this._addProduct} className={addButtonCartClassName}>
Ajouter au panier
</Button>
</div>
</section>
</div>
</div>
</div>
</Layout>
) : null
}
_renderPictures = (picture: TMoltinImage, i: number): React$Element<*> =>
<div className='column' key={i}>
<img src={picture.url.http} alt={picture.name} onClick={this._changeCurrentPicture(picture)} />
</div>
_changeCurrentPicture = (picture: TMoltinImage): Function => () => {
this.setState({ currentPicture: picture })
}
_addProduct = (): void => {
const { product } = this.props
this.props.dispatch(addProduct(product))
}
}
export default providerConnect()(ProductDetails)
| 27.940171 | 122 | 0.576017 | 3.25 |
79fa2b212057659746f9224f97fb012e1eed02f8 | 5,410 | lua | Lua | PersonalAssistant/PersonalAssistantJunk/PAJunk/PAJunkCustom.lua | Nesferatum/ESO-PersonalAssistant | fb1063438999ab3a7a75923a196591133ec83fee | [
"Zlib"
] | null | null | null | PersonalAssistant/PersonalAssistantJunk/PAJunk/PAJunkCustom.lua | Nesferatum/ESO-PersonalAssistant | fb1063438999ab3a7a75923a196591133ec83fee | [
"Zlib"
] | null | null | null | PersonalAssistant/PersonalAssistantJunk/PAJunk/PAJunkCustom.lua | Nesferatum/ESO-PersonalAssistant | fb1063438999ab3a7a75923a196591133ec83fee | [
"Zlib"
] | null | null | null | -- Local instances of Global tables --
local PA = PersonalAssistant
local PAJ = PA.Junk
local PAHF = PA.HelperFunctions
-- ---------------------------------------------------------------------------------------------------------------------
local function _unmarkAllPAItemIdsFromJunk(paItemId)
PAJ.debugln("#_unmarkAllPAItemIdsFromJunk(%s)", tostring(paItemId))
local customPAItems = {
[paItemId] = {}
}
local excludeJunk, excludeCharacterBound, excludeStolen = false, false, false
local paItemIdComparator = PAHF.getPAItemIdComparator(customPAItems, excludeJunk, excludeCharacterBound, excludeStolen)
local bagCache = SHARED_INVENTORY:GenerateFullSlotData(paItemIdComparator, PAHF.getAccessibleBags())
PAJ.debugln("#bagCache = "..tostring(#bagCache))
for index = #bagCache, 1, -1 do
local itemData = bagCache[index]
local isJunk = IsItemJunk(itemData.bagId, itemData.slotIndex)
if isJunk then
SetItemIsJunk(itemData.bagId, itemData.slotIndex, false)
PlaySound(SOUNDS.INVENTORY_ITEM_UNJUNKED)
end
end
end
local function _markAllPAItemIdsAsJunk(paItemId)
PAJ.debugln("#_markAllPAItemIdsAsJunk(%s)", tostring(paItemId))
local customPAItems = {
[paItemId] = {}
}
local excludeJunk, excludeCharacterBound, excludeStolen = true, false, false
local paItemIdComparator = PAHF.getPAItemIdComparator(customPAItems, excludeJunk, excludeCharacterBound, excludeStolen)
local bagCache = SHARED_INVENTORY:GenerateFullSlotData(paItemIdComparator, PAHF.getAccessibleBags())
PAJ.debugln("#bagCache = "..tostring(#bagCache))
for index = #bagCache, 1, -1 do
local itemData = bagCache[index]
if CanItemBeMarkedAsJunk(itemData.bagId, itemData.slotIndex) then
SetItemIsJunk(itemData.bagId, itemData.slotIndex, true)
PlaySound(SOUNDS.INVENTORY_ITEM_JUNKED)
end
end
end
-- ---------------------------------------------------------------------------------------------------------------------
local function getNonStolenItemLink(itemLink)
-- if itemLink is NOT stolen, directly return it
if not IsItemLinkStolen(itemLink) then return itemLink end
-- if it is stolen, remove first the stolen information
local itemLinkMod = string.gsub(itemLink, "1(:%d+:%d+|h|h)$", "0%1")
-- then also remove the red border
itemLinkMod = string.gsub(itemLinkMod, "%d+(:%d+:%d+:%d+:%d+:%d+:%d+|h|h)$", "0%1")
return itemLinkMod
end
local function isItemLinkPermanentJunk(itemLink)
local PAJCUstomPAItemIds = PAJ.SavedVars.Custom.PAItemIds
local paItemId = PAHF.getPAItemLinkIdentifier(itemLink)
return PAHF.isKeyInTable(PAJCUstomPAItemIds, paItemId)
end
local function isItemPermanentJunk(bagId, slotIndex)
local PAJCUstomPAItemIds = PAJ.SavedVars.Custom.PAItemIds
local paItemId = PAHF.getPAItemIdentifier(bagId, slotIndex)
return PAHF.isKeyInTable(PAJCUstomPAItemIds, paItemId)
end
local function addItemLinkToPermanentJunk(itemLink)
PAJ.debugln("PA.Junk.addItemLinkToPermanentJunk")
if PAJ.SavedVars.Custom.customItemsEnabled then
local PAJCUstomPAItemIds = PAJ.SavedVars.Custom.PAItemIds
local paItemId = PAHF.getPAItemLinkIdentifier(itemLink)
-- only add the entry if it is an UPDATE case, or if it does not exist yet
if not PAHF.isKeyInTable(PAJCUstomPAItemIds, paItemId) then
local localItemLink = getNonStolenItemLink(itemLink)
PAJCUstomPAItemIds[paItemId] = {
itemLink = localItemLink,
junkCount = 0,
ruleAdded = GetTimeStamp()
}
PA.Junk.println(SI_PA_CHAT_JUNK_RULES_ADDED, localItemLink:gsub("%|H0", "|H1"))
-- loop though whole inventory to mark all matching items
_markAllPAItemIdsAsJunk(paItemId)
-- refresh the list (if it was initialized)
if PA.JunkRulesList then PA.JunkRulesList:Refresh() end
else
PAJ.debugln("ERROR; PAJ rule already existing")
end
end
end
local function removeItemLinkFromPermanentJunk(itemLink)
PAJ.debugln("PA.Junk.removeItemLinkFromPermanentJunk")
if PAJ.SavedVars.Custom.customItemsEnabled then
local PAJCUstomPAItemIds = PAJ.SavedVars.Custom.PAItemIds
local paItemId = PAHF.getPAItemLinkIdentifier(itemLink)
if PAHF.isKeyInTable(PAJCUstomPAItemIds, paItemId) then
-- is in table, delete rule
PAJCUstomPAItemIds[paItemId] = nil
PAJ.println(SI_PA_CHAT_JUNK_RULES_DELETED, itemLink:gsub("%|H0", "|H1"))
-- loop though whole inventory to unmark all matching items
_unmarkAllPAItemIdsFromJunk(paItemId)
-- refresh the list (if it was initialized)
if PA.JunkRulesList then PA.JunkRulesList:Refresh() end
else
PAJ.debugln("ERROR; PAJ rule not existing, cannot be deleted")
end
end
end
-- ---------------------------------------------------------------------------------------------------------------------
-- Export
PA.Junk = PA.Junk or {}
PA.Junk.Custom = {
isItemLinkPermanentJunk = isItemLinkPermanentJunk,
isItemPermanentJunk = isItemPermanentJunk,
addItemLinkToPermanentJunk = addItemLinkToPermanentJunk,
removeItemLinkFromPermanentJunk = removeItemLinkFromPermanentJunk
} | 42.265625 | 123 | 0.661922 | 3.03125 |
2fba1f787fafcd9f89766b0283db8d121a83b471 | 2,283 | rs | Rust | foundationdb/src/directory/node.rs | foundationdb-rs/foundationdb-rs | a309d67946a272d245b6d1d1072b2196d5951655 | [
"Apache-2.0",
"MIT"
] | 13 | 2021-12-09T07:05:11.000Z | 2022-02-12T18:43:27.000Z | foundationdb/src/directory/node.rs | foundationdb-rs/foundationdb-rs | a309d67946a272d245b6d1d1072b2196d5951655 | [
"Apache-2.0",
"MIT"
] | 37 | 2021-12-08T16:24:05.000Z | 2022-03-24T13:19:57.000Z | foundationdb/src/directory/node.rs | foundationdb-rs/foundationdb-rs | a309d67946a272d245b6d1d1072b2196d5951655 | [
"Apache-2.0",
"MIT"
] | 3 | 2021-12-08T18:27:11.000Z | 2021-12-29T03:41:27.000Z | use crate::directory::directory_layer::{
DirectoryLayer, DEFAULT_SUB_DIRS, LAYER_SUFFIX, PARTITION_LAYER,
};
use crate::directory::error::DirectoryError;
use crate::directory::DirectoryOutput;
use crate::tuple::Subspace;
use crate::RangeOption;
use crate::Transaction;
#[derive(Debug, Clone)]
pub(super) struct Node {
pub(super) subspace: Subspace,
pub(super) current_path: Vec<String>,
pub(super) target_path: Vec<String>,
pub(super) directory_layer: DirectoryLayer,
pub(super) layer: Vec<u8>,
}
impl Node {
// `load_metadata` is loading extra information for the node, like the layer
pub(crate) async fn load_metadata(
trx: &Transaction,
subspace: &Subspace,
) -> Result<Vec<u8>, DirectoryError> {
let key = subspace.pack(&LAYER_SUFFIX);
let layer = match trx.get(&key, false).await {
Err(err) => return Err(DirectoryError::FdbError(err)),
Ok(fdb_slice) => fdb_slice.as_deref().unwrap_or_default().to_vec(),
};
Ok(layer)
}
pub(crate) fn get_partition_subpath(&self) -> Vec<String> {
Vec::from(&self.target_path[self.current_path.len()..])
}
/// list sub-folders for a node
pub(crate) async fn list_sub_folders(
&self,
trx: &Transaction,
) -> Result<Vec<String>, DirectoryError> {
let mut results = vec![];
let range_option = RangeOption::from(&self.subspace.subspace(&DEFAULT_SUB_DIRS));
let fdb_values = trx.get_range(&range_option, 1_024, false).await?;
for fdb_value in fdb_values {
let subspace = Subspace::from_bytes(fdb_value.key());
// stripping from subspace
let sub_directory: (i64, String) = self.subspace.unpack(subspace.bytes())?;
results.push(sub_directory.1);
}
Ok(results)
}
pub(crate) fn is_in_partition(&self, include_empty_subpath: bool) -> bool {
self.layer.as_slice().eq(PARTITION_LAYER)
&& (include_empty_subpath || self.target_path.len() > self.current_path.len())
}
pub(crate) fn get_contents(&self) -> Result<DirectoryOutput, DirectoryError> {
self.directory_layer
.contents_of_node(&self.subspace, &self.current_path, &self.layer)
}
}
| 34.074627 | 90 | 0.64608 | 3.0625 |
125eef5327aeb1c7b10700015c0e568e63b49d63 | 1,348 | c | C | docs/examples_src/pbuf_extract.c | jbrryhooves/lwgsm | 98924cfb5008e290f15f617755bdc8ca54c9ca04 | [
"MIT"
] | 130 | 2020-07-15T01:12:52.000Z | 2022-03-24T09:34:57.000Z | docs/examples_src/pbuf_extract.c | jbrryhooves/lwgsm | 98924cfb5008e290f15f617755bdc8ca54c9ca04 | [
"MIT"
] | 31 | 2020-08-05T12:43:08.000Z | 2022-03-21T10:21:08.000Z | docs/examples_src/pbuf_extract.c | jbrryhooves/lwgsm | 98924cfb5008e290f15f617755bdc8ca54c9ca04 | [
"MIT"
] | 57 | 2018-02-09T10:49:30.000Z | 2019-12-05T08:35:38.000Z | const void* data;
size_t pos, len;
lwgsm_pbuf_p a, b, c;
const char str_a[] = "This is one long";
const char str_a[] = "string. We want to save";
const char str_a[] = "chain of pbufs to file";
/* Create pbufs to hold these strings */
a = lwgsm_pbuf_new(strlen(str_a));
b = lwgsm_pbuf_new(strlen(str_b));
c = lwgsm_pbuf_new(strlen(str_c));
/* Write data to pbufs */
lwgsm_pbuf_take(a, str_a, strlen(str_a), 0);
lwgsm_pbuf_take(b, str_b, strlen(str_b), 0);
lwgsm_pbuf_take(c, str_c, strlen(str_c), 0);
/* Connect pbufs together */
lwgsm_pbuf_chain(a, b);
lwgsm_pbuf_chain(a, c);
/*
* pbuf a now contains chain of b and c together
* and at this point application wants to print (or save) data from chained pbuf
*
* Process pbuf by pbuf with code below
*/
/*
* Get linear address of current pbuf at specific offset
* Function will return pointer to memory address at specific position
* and `len` will hold length of data block
*/
pos = 0;
while ((data = lwgsm_pbuf_get_linear_addr(a, pos, &len)) != NULL) {
/* Custom process function... */
/* Process data with data pointer and block length */
process_data(data, len);
printf("Str: %.*s", len, data);
/* Increase offset position for next block */
pos += len;
}
/* Call free only on a pbuf. Since it is chained, b and c will be freed too */
lwgsm_pbuf_free(a);
| 28.083333 | 80 | 0.688427 | 3.203125 |
015a0ac5c71bd1d99d1ec8db59a466b5521d34a8 | 2,968 | lua | Lua | dev/spark/gamemode/hookwars/game.lua | chrisinajar/spark | 3c6b30592c00bc38738cc3aaca2144edfc6cc8b2 | [
"AML"
] | 2 | 2020-08-20T03:40:24.000Z | 2021-02-07T20:31:43.000Z | dev/spark/gamemode/hookwars/game.lua | chrisinajar/spark | 3c6b30592c00bc38738cc3aaca2144edfc6cc8b2 | [
"AML"
] | null | null | null | dev/spark/gamemode/hookwars/game.lua | chrisinajar/spark | 3c6b30592c00bc38738cc3aaca2144edfc6cc8b2 | [
"AML"
] | 5 | 2020-08-27T20:44:18.000Z | 2021-08-21T22:54:11.000Z | require "scripts.info";
require "scripts.core.unit"
Require("CommonCheatCodes")
Game={}
function Game:OnActivate (gameMode)
self.gameMode = gameMode
Debug.Log("Game:OnActivate()")
Debug.Log("gamemode is " .. self.gameMode);
if( self:IsAuthoritative() ) then
GameManagerRequestBus.Broadcast.CreateUnitWithJson("bard",'{ "team":"left" }');
GameManagerRequestBus.Broadcast.CreateUnitWithJson("bard",'{ "team":"right"}');
end
self.unitsHandler = UnitsNotificationBus.Connect(self);
self.commandHandler = ConsoleNotificationBus.Connect(self);
self.variableFilter = VariableManagerNotificationBus.Connect(self);
Debug.Log("connected to filter")
end
function Game:OnDeactivate()
Debug.Log("Game:OnDeactivate()")
self.unitsHandler:Disconnect()
self.commandHandler:Disconnect()
self.variableFilter:Disconnect()
end
function Game:OnSetValueFilter(id,value)
if self.wtf == true then
if(id.variableId == "mana") then
FilterResult(FilterResult.FILTER_PREVENT);
end
if(id.variableId == "cooldown_timer") then
FilterResult(FilterResult.FILTER_PREVENT);
end
end
end
function Game:AddTimer(seconds, description)
self.timerHandler = self.timerHandler or {}
local ticket=TimerRequestBus.Broadcast.ScheduleTimer(seconds,description)
local handler=TimerNotificationBus.Connect(self,ticket)
table.insert(self.timerHandler, handler)
end
function Game:OnUnitSpawned(unitId)
local unit = Unit({entityId=unitId})
Debug.Log("Game:OnUnitSpawned"..tostring(unitId));
AudioRequestBus.Broadcast.PlaySound("Play_sfx_respawn");
local info = GetUnitInfo(unitId);
local tags=vector_Crc32();
tags:push_back(Crc32("spawn"));
if(info and info.team) then
Debug.Log("unit's team is : "..info.team)
tags:push_back(Crc32(info.team));
else
return;
end
local spawn = GameManagerRequestBus.Broadcast.GetEntitiesHavingTags(tags);
if spawn and #spawn>=1 then
Debug.Log("Game:OnUnitSpawned relocating the unit to the spawn")
spawn = spawn[1];
local new_pos = TransformBus.Event.GetWorldTranslation(spawn)
unit:SetPosition(new_pos)
else
Debug.Log("Game:OnUnitSpawned spawn not found")
end
end
function Game:OnUnitDeath(unitId)
Debug.Log("Game:OnUnitDeath");
	--Using the timer we can only pass a string.
	--For now there is no way to create an entityId from a string, so we need to do it like that.
self.deadUnits = self.deadUnits or {}
table.insert(self.deadUnits,unitId)
self:AddTimer(4,tostring(unitId))
end
function Game:OnTimerFinished(description)
Debug.Log("OnTimerFinished "..description)
for i=1,#self.deadUnits do
if( tostring(self.deadUnits[i]) == description) then
local unit = Unit({entityId=self.deadUnits[i]})
--UnitRequestBus.Event.SetAlive(self.deadUnits[i],true)
unit:SetAlive(true)
unit:SetValue("hp_percentage", 1);
unit:SetValue("mana_percentage", 1);
break;
end
end
end
function Game:OnCommandFilter(cmd)
return CommonCheatCodes(self,cmd)
end
return Game; | 23.935484 | 92 | 0.753369 | 3.03125 |
7b62bb4e5d5cfb098274e782be30d68abcb706bc | 945 | rb | Ruby | src/assignments/age-verifier/solution.rb | leojh/learning-ruby | db3abdba7f8d974127c3eb7f592cc52cb81c169a | [
"MIT"
] | null | null | null | src/assignments/age-verifier/solution.rb | leojh/learning-ruby | db3abdba7f8d974127c3eb7f592cc52cb81c169a | [
"MIT"
] | null | null | null | src/assignments/age-verifier/solution.rb | leojh/learning-ruby | db3abdba7f8d974127c3eb7f592cc52cb81c169a | [
"MIT"
] | null | null | null | #As a developer on the App Store team, you are required to check the age of every user.
#Due to US law, users wishing to open accounts online but be at least 13 years of age.
#Your job as a developer is to write a module that takes in a user's date of birth
#and makes sure that the user is at least 13 years old.
require 'date'
def isUser13YearsOld(dateOfBirth)
numberOfMonthsIn13Years = 12 * 13
min = Date.today << numberOfMonthsIn13Years
dateOfBirth <= min
end
def yearsBetweenDates(date1, date2)
((date2.year - date1.year) * 12 + date2.month - date1.month - (date2.day >= date1.day ? 0 : 1)) / 12
end
def main
puts "Enter Date of Birth (yyyy-mm-dd):"
dateOfBirth = Date.parse(gets.chomp)
puts "You are #{yearsBetweenDates(dateOfBirth, Date.today)}"
userIs13 = isUser13YearsOld(dateOfBirth)
if (userIs13)
puts "You are 13 or older!"
else
puts "Sorry, you need to be at least 13 years old"
end
end
main()
| 27.794118 | 102 | 0.716402 | 3 |
750883f83933d9cbc5118238951e80289a752496 | 42,857 | c | C | libsignaletic/src/libsignaletic.c | continuing-creativity/signaletic | 4a91207c8b2ca1a67f72402975e9348a4bed3d13 | [
"MIT"
] | null | null | null | libsignaletic/src/libsignaletic.c | continuing-creativity/signaletic | 4a91207c8b2ca1a67f72402975e9348a4bed3d13 | [
"MIT"
] | 14 | 2022-01-31T21:01:59.000Z | 2022-02-22T03:17:28.000Z | libsignaletic/src/libsignaletic.c | continuing-creativity/signaletic | 4a91207c8b2ca1a67f72402975e9348a4bed3d13 | [
"MIT"
] | null | null | null | #include <math.h> // For powf, fmodf, sinf, roundf, fabsf, rand
#include <stdlib.h> // For RAND_MAX
#include <tlsf.h> // Includes assert.h, limits.h, stddef.h
// stdio.h, stdlib.h, string.h (for errors etc.)
#include <libsignaletic.h>
float sig_fminf(float a, float b) {
float r;
#ifdef __arm__
asm("vminnm.f32 %[d], %[n], %[m]" : [d] "=t"(r) : [n] "t"(a), [m] "t"(b) :);
#else
r = (a < b) ? a : b;
#endif // __arm__
return r;
}
float sig_fmaxf(float a, float b) {
float r;
#ifdef __arm__
asm("vmaxnm.f32 %[d], %[n], %[m]" : [d] "=t"(r) : [n] "t"(a), [m] "t"(b) :);
#else
r = (a > b) ? a : b;
#endif // __arm__
return r;
}
// TODO: Unit tests
float sig_clamp(float value, float min, float max) {
return sig_fminf(sig_fmaxf(value, min), max);
}
// TODO: Replace this with an object that implements
// the quick and dirty LCR method from Numerical Recipes:
// unsigned long jran = seed,
// ia = 4096,
// ic = 150889,
// im = 714025;
// jran=(jran*ia+ic) % im;
// float ran=(float) jran / (float) im;
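// A minimal sketch of that generator, using the constants above and a
// hypothetical sig_LCG struct that is not part of the current API
// (illustration only):
//
//     struct sig_LCG { unsigned long jran; };
//
//     float sig_LCG_next(struct sig_LCG* self) {
//         self->jran = (self->jran * 4096 + 150889) % 714025;
//         return (float) self->jran / (float) 714025;
//     }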
float sig_randf() {
return (float) ((double) rand() / ((double) RAND_MAX + 1));
}
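// Scales a unipolar float sample in the range [0.0, 1.0] to an unsigned
// 12-bit integer in the range [0, 4095] (e.g. for writing to a 12-bit DAC).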
uint16_t sig_unipolarToUint12(float sample) {
return (uint16_t) (sample * 4095.0f);
}
uint16_t sig_bipolarToUint12(float sample) {
float normalized = sample * 0.5 + 0.5;
return (uint16_t) (normalized * 4095.0f);
}
uint16_t sig_bipolarToInvUint12(float sample) {
return sig_bipolarToUint12(-sample);
}
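// Converts a MIDI note number to a frequency in Hz using equal temperament,
// with A4 (MIDI note 69) tuned to 440 Hz; e.g. sig_midiToFreq(69.0f) returns
// 440.0f and sig_midiToFreq(81.0f) returns 880.0f.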
float sig_midiToFreq(float midiNum) {
return powf(2, (midiNum - 69.0f) / 12.0f) * 440.0f;
}
float sig_randomFill(size_t i, float_array_ptr array) {
return sig_randf();
}
void sig_fill(float_array_ptr array, size_t length,
sig_array_filler filler) {
for (size_t i = 0; i < length; i++) {
FLOAT_ARRAY(array)[i] = filler(i, array);
}
}
void sig_fillWithValue(float_array_ptr array, size_t size,
float value) {
for (size_t i = 0; i < size; i++) {
FLOAT_ARRAY(array)[i] = value;
}
}
void sig_fillWithSilence(float_array_ptr array, size_t size) {
sig_fillWithValue(array, size, 0.0f);
}
// TODO: Unit tests.
float sig_interpolate_linear(float idx, float_array_ptr table,
size_t length) {
int32_t idxIntegral = (int32_t) idx;
float idxFractional = idx - (float) idxIntegral;
float a = FLOAT_ARRAY(table)[idxIntegral];
// TODO: Do we want to wrap around the end like this,
// or should we expect users to provide us with idx within bounds?
float b = FLOAT_ARRAY(table)[(idxIntegral + 1) % length];
return a + (b - a) * idxFractional;
}
// TODO: Unit tests.
float sig_interpolate_cubic(float idx, float_array_ptr table,
size_t length) {
size_t idxIntegral = (size_t) idx;
float idxFractional = idx - (float) idxIntegral;
// TODO: As above, are these modulo operations required,
// or should we expect users to provide us in-bound values?
const size_t i0 = idxIntegral % length;
const float xm1 = FLOAT_ARRAY(table)[i0 > 0 ? i0 - 1 : length - 1];
const float x0 = FLOAT_ARRAY(table)[i0];
const float x1 = FLOAT_ARRAY(table)[(i0 + 1) % length];
const float x2 = FLOAT_ARRAY(table)[(i0 + 2) % length];
const float c = (x1 - xm1) * 0.5f;
const float v = x0 - x1;
const float w = c + v;
const float a = w + v + (x2 - x0) * 0.5f;
const float bNeg = w + a;
return (((a * idxFractional) - bNeg) * idxFractional + c) *
idxFractional + x0;
}
// TODO: Unit tests.
float sig_filter_onepole(float current, float previous, float coeff) {
return current + coeff * (previous - current);
}
// TODO: Unit tests.
float sig_waveform_sine(float phase) {
return sinf(phase);
}
// TODO: Unit tests.
float sig_waveform_square(float phase) {
return phase <= sig_PI ? 1.0f : -1.0f;
}
// TODO: Unit tests.
float sig_waveform_saw(float phase) {
return (2.0f * (phase * (1.0f / sig_TWOPI))) - 1.0f;
}
// TODO: Unit tests.
float sig_waveform_reverseSaw(float phase) {
return 1.0f - 2.0f * (phase * (1.0f / sig_TWOPI));
}
// TODO: Unit tests.
float sig_waveform_triangle(float phase) {
float val = sig_waveform_saw(phase);
if (val < 0.0) {
val = -val;
}
return 2.0f * (val - 0.5f);
}
// TODO: Implement enough test coverage for sig_Allocator
// to support a switch from TLSF to another memory allocator
// implementation sometime in the future (gh-26).
void sig_Allocator_init(struct sig_Allocator* self) {
tlsf_create_with_pool(self->heap, self->heapSize);
}
void* sig_Allocator_malloc(struct sig_Allocator* self, size_t size) {
return tlsf_malloc(self->heap, size);
}
void sig_Allocator_free(struct sig_Allocator* self, void* obj) {
tlsf_free(self->heap, obj);
}
struct sig_AudioSettings* sig_AudioSettings_new(
struct sig_Allocator* allocator) {
struct sig_AudioSettings* settings =
(struct sig_AudioSettings*) sig_Allocator_malloc(
allocator, sizeof(struct sig_AudioSettings));
settings->sampleRate = sig_DEFAULT_AUDIOSETTINGS.sampleRate;
settings->numChannels = sig_DEFAULT_AUDIOSETTINGS.numChannels;
settings->blockSize = sig_DEFAULT_AUDIOSETTINGS.blockSize;
return settings;
}
void sig_AudioSettings_destroy(struct sig_Allocator* allocator,
struct sig_AudioSettings* self) {
sig_Allocator_free(allocator, self);
}
// TODO: Unit tests.
size_t sig_secondsToSamples(struct sig_AudioSettings* audioSettings,
float duration) {
float numSamplesF = audioSettings->sampleRate * duration;
long rounded = lroundf(numSamplesF);
return (size_t) labs(rounded);
}
float_array_ptr sig_samples_new(struct sig_Allocator* allocator,
size_t length) {
return (float_array_ptr) sig_Allocator_malloc(allocator,
sizeof(float) * length);
}
// TODO: Does an AudioBlock type need to be introduced?
// TODO: Do we need a destroy function too?
float_array_ptr sig_AudioBlock_new(
struct sig_Allocator* allocator,
struct sig_AudioSettings* audioSettings) {
return sig_samples_new(allocator, audioSettings->blockSize);
}
float_array_ptr sig_AudioBlock_newWithValue(
struct sig_Allocator* allocator,
struct sig_AudioSettings* audioSettings,
float value) {
float_array_ptr block = sig_AudioBlock_new(allocator,
audioSettings);
sig_fillWithValue(block, audioSettings->blockSize, value);
return block;
}
struct sig_Buffer* sig_Buffer_new(struct sig_Allocator* allocator,
size_t length) {
struct sig_Buffer* self = (struct sig_Buffer*) sig_Allocator_malloc(allocator, sizeof(struct sig_Buffer));
self->length = length;
self->samples = sig_samples_new(allocator, length);
return self;
}
void sig_Buffer_fill(struct sig_Buffer* self,
sig_array_filler filler) {
sig_fill(self->samples, self->length, filler);
}
void sig_Buffer_fillWithValue(struct sig_Buffer* self, float value) {
sig_fillWithValue(self->samples, self->length, value);
}
void sig_Buffer_fillWithSilence(struct sig_Buffer* self) {
sig_fillWithSilence(self->samples, self->length);
}
// TODO: Unit tests.
void sig_Buffer_fillWithWaveform(struct sig_Buffer* self,
sig_waveform_generator generate, float sampleRate,
float phase, float freq) {
float phaseInc = freq * sig_TWOPI / sampleRate;
for (size_t i = 0; i < self->length; i++) {
FLOAT_ARRAY(self->samples)[i] = generate(phase);
phase += phaseInc;
if (phase >= sig_TWOPI) {
phase -= sig_TWOPI;
} else if (phase < 0.0) {
phase += sig_TWOPI;
}
}
}
void sig_Buffer_destroy(struct sig_Allocator* allocator, struct sig_Buffer* self) {
sig_Allocator_free(allocator, self->samples);
sig_Allocator_free(allocator, self);
};
struct sig_Buffer* sig_BufferView_new(
struct sig_Allocator* allocator,
struct sig_Buffer* buffer, size_t startIdx, size_t length) {
struct sig_Buffer* self = (struct sig_Buffer*) sig_Allocator_malloc(allocator, sizeof(struct sig_Buffer));
// TODO: Need to signal an error rather than
// just returning a null pointer and a length of zero.
if (startIdx < 0 || length > (buffer->length - startIdx)) {
self->samples = NULL;
self->length = 0;
} else {
self->samples = FLOAT_ARRAY(buffer->samples) + startIdx;
self->length = length;
}
return self;
}
void sig_BufferView_destroy(struct sig_Allocator* allocator,
struct sig_Buffer* self) {
// Don't destroy the samples array;
// it is shared with other Buffers.
sig_Allocator_free(allocator, self);
}
void sig_dsp_Signal_init(void* signal,
struct sig_AudioSettings* settings,
float_array_ptr output,
sig_dsp_generateFn generate) {
struct sig_dsp_Signal* self = (struct sig_dsp_Signal*) signal;
self->audioSettings = settings;
self->output = output;
self->generate = generate;
};
/**
* Generic generation function
* that operates on any Signal and outputs only silence.
*/
void sig_dsp_Signal_generate(void* signal) {
struct sig_dsp_Signal* self = (struct sig_dsp_Signal*) signal;
sig_fillWithSilence(self->output, self->audioSettings->blockSize);
}
void sig_dsp_Signal_destroy(struct sig_Allocator* allocator, void* signal) {
sig_Allocator_free(allocator,
((struct sig_dsp_Signal*) signal)->output);
sig_Allocator_free(allocator, signal);
}
void sig_dsp_Value_init(struct sig_dsp_Value* self,
struct sig_AudioSettings *settings,
float_array_ptr output) {
struct sig_dsp_Value_Parameters params = {
.value = 1.0
};
sig_dsp_Signal_init(self, settings, output,
*sig_dsp_Value_generate);
    self->parameters = params;
    // Initialize lastSample to avoid comparing against uninitialized memory
    // on the first generate() call.
    self->lastSample = 0.0f;
}
struct sig_dsp_Value* sig_dsp_Value_new(struct sig_Allocator* allocator,
struct sig_AudioSettings* settings) {
float_array_ptr output = sig_AudioBlock_new(allocator, settings);
struct sig_dsp_Value* self = sig_Allocator_malloc(allocator,
sizeof(struct sig_dsp_Value));
sig_dsp_Value_init(self, settings, output);
return self;
}
void sig_dsp_Value_destroy(struct sig_Allocator* allocator, struct sig_dsp_Value* self) {
sig_dsp_Signal_destroy(allocator, (void*) self);
}
void sig_dsp_Value_generate(void* signal) {
struct sig_dsp_Value* self = (struct sig_dsp_Value*) signal;
if (self->parameters.value == self->lastSample) {
return;
}
for (size_t i = 0; i < self->signal.audioSettings->blockSize; i++) {
FLOAT_ARRAY(self->signal.output)[i] = self->parameters.value;
}
self->lastSample = self->parameters.value;
}
struct sig_dsp_BinaryOp* sig_dsp_Add_new(
struct sig_Allocator* allocator,
struct sig_AudioSettings* settings,
struct sig_dsp_BinaryOp_Inputs* inputs) {
float_array_ptr output = sig_AudioBlock_new(allocator, settings);
struct sig_dsp_BinaryOp* self = sig_Allocator_malloc(allocator,
sizeof(struct sig_dsp_BinaryOp));
sig_dsp_Add_init(self, settings, inputs, output);
return self;
}
void sig_dsp_Add_init(struct sig_dsp_BinaryOp* self,
struct sig_AudioSettings* settings,
struct sig_dsp_BinaryOp_Inputs* inputs,
float_array_ptr output) {
sig_dsp_Signal_init(self, settings, output,
*sig_dsp_Add_generate);
self->inputs = inputs;
}
// TODO: Unit tests.
void sig_dsp_Add_generate(void* signal) {
struct sig_dsp_BinaryOp* self = (struct sig_dsp_BinaryOp*) signal;
for (size_t i = 0; i < self->signal.audioSettings->blockSize; i++) {
float left = FLOAT_ARRAY(self->inputs->left)[i];
float right = FLOAT_ARRAY(self->inputs->right)[i];
float val = left + right;
FLOAT_ARRAY(self->signal.output)[i] = val;
}
}
void sig_dsp_Add_destroy(struct sig_Allocator* allocator,
struct sig_dsp_BinaryOp* self) {
sig_dsp_Signal_destroy(allocator, self);
}
void sig_dsp_Mul_init(struct sig_dsp_BinaryOp* self,
struct sig_AudioSettings* settings, struct sig_dsp_BinaryOp_Inputs* inputs, float_array_ptr output) {
sig_dsp_Signal_init(self, settings, output, *sig_dsp_Mul_generate);
self->inputs = inputs;
};
struct sig_dsp_BinaryOp* sig_dsp_Mul_new(struct sig_Allocator* allocator,
struct sig_AudioSettings* settings,
struct sig_dsp_BinaryOp_Inputs* inputs) {
float_array_ptr output = sig_AudioBlock_new(allocator, settings);
struct sig_dsp_BinaryOp* self = sig_Allocator_malloc(allocator,
sizeof(struct sig_dsp_BinaryOp));
sig_dsp_Mul_init(self, settings, inputs, output);
return self;
}
void sig_dsp_Mul_destroy(struct sig_Allocator* allocator,
struct sig_dsp_BinaryOp* self) {
sig_dsp_Signal_destroy(allocator, (void*) self);
};
void sig_dsp_Mul_generate(void* signal) {
struct sig_dsp_BinaryOp* self = (struct sig_dsp_BinaryOp*) signal;
for (size_t i = 0; i < self->signal.audioSettings->blockSize; i++) {
float left = FLOAT_ARRAY(self->inputs->left)[i];
float right = FLOAT_ARRAY(self->inputs->right)[i];
float val = left * right;
FLOAT_ARRAY(self->signal.output)[i] = val;
}
}
struct sig_dsp_BinaryOp* sig_dsp_Div_new(
struct sig_Allocator* allocator,
struct sig_AudioSettings* settings,
struct sig_dsp_BinaryOp_Inputs* inputs) {
float_array_ptr output = sig_AudioBlock_new(allocator, settings);
struct sig_dsp_BinaryOp* self = sig_Allocator_malloc(allocator,
sizeof(struct sig_dsp_BinaryOp));
sig_dsp_Div_init(self, settings, inputs, output);
return self;
}
void sig_dsp_Div_init(struct sig_dsp_BinaryOp* self,
struct sig_AudioSettings* settings,
struct sig_dsp_BinaryOp_Inputs* inputs,
float_array_ptr output) {
sig_dsp_Signal_init(self, settings, output,
*sig_dsp_Div_generate);
self->inputs = inputs;
}
void sig_dsp_Div_generate(void* signal) {
struct sig_dsp_BinaryOp* self = (struct sig_dsp_BinaryOp*) signal;
for (size_t i = 0; i < self->signal.audioSettings->blockSize; i++) {
float left = FLOAT_ARRAY(self->inputs->left)[i];
float right = FLOAT_ARRAY(self->inputs->right)[i];
float val = left / right;
FLOAT_ARRAY(self->signal.output)[i] = val;
}
}
void sig_dsp_Div_destroy(struct sig_Allocator* allocator,
struct sig_dsp_BinaryOp* self) {
sig_dsp_Signal_destroy(allocator, self);
}
struct sig_dsp_Invert* sig_dsp_Invert_new(
struct sig_Allocator* allocator,
struct sig_AudioSettings* settings,
struct sig_dsp_Invert_Inputs* inputs) {
float_array_ptr output = sig_AudioBlock_new(allocator, settings);
struct sig_dsp_Invert* self = sig_Allocator_malloc(allocator,
sizeof(struct sig_dsp_Invert));
sig_dsp_Invert_init(self, settings, inputs, output);
return self;
}
void sig_dsp_Invert_init(struct sig_dsp_Invert* self,
struct sig_AudioSettings* settings,
struct sig_dsp_Invert_Inputs* inputs,
float_array_ptr output) {
sig_dsp_Signal_init(self, settings, output,
*sig_dsp_Invert_generate);
self->inputs = inputs;
}
// TODO: Unit tests.
void sig_dsp_Invert_generate(void* signal) {
struct sig_dsp_Invert* self = (struct sig_dsp_Invert*) signal;
for (size_t i = 0; i < self->signal.audioSettings->blockSize; i++) {
float inSamp = FLOAT_ARRAY(self->inputs->source)[i];
FLOAT_ARRAY(self->signal.output)[i] = -inSamp;
}
}
void sig_dsp_Invert_destroy(struct sig_Allocator* allocator,
struct sig_dsp_Invert* self) {
sig_dsp_Signal_destroy(allocator, self);
}
struct sig_dsp_Accumulate* sig_dsp_Accumulate_new(
struct sig_Allocator* allocator,
struct sig_AudioSettings* settings,
struct sig_dsp_Accumulate_Inputs* inputs) {
float_array_ptr output = sig_AudioBlock_new(allocator, settings);
struct sig_dsp_Accumulate* self = sig_Allocator_malloc(
allocator,
sizeof(struct sig_dsp_Accumulate));
sig_dsp_Accumulate_init(self, settings, inputs, output);
return self;
}
void sig_dsp_Accumulate_init(
struct sig_dsp_Accumulate* self,
struct sig_AudioSettings* settings,
struct sig_dsp_Accumulate_Inputs* inputs,
float_array_ptr output) {
sig_dsp_Signal_init(self, settings, output,
*sig_dsp_Accumulate_generate);
struct sig_dsp_Accumulate_Parameters parameters = {
.accumulatorStart = 1.0
};
self->inputs = inputs;
self->parameters = parameters;
self->accumulator = parameters.accumulatorStart;
self->previousReset = 0.0f;
}
// TODO: Implement an audio rate version of this signal.
// TODO: Unit tests
void sig_dsp_Accumulate_generate(void* signal) {
struct sig_dsp_Accumulate* self =
(struct sig_dsp_Accumulate*) signal;
float reset = FLOAT_ARRAY(self->inputs->reset)[0];
if (reset > 0.0f && self->previousReset <= 0.0f) {
// Reset the accumulator if we received a trigger.
self->accumulator = self->parameters.accumulatorStart;
}
self->accumulator += FLOAT_ARRAY(self->inputs->source)[0];
for (size_t i = 0; i < self->signal.audioSettings->blockSize; i++) {
FLOAT_ARRAY(self->signal.output)[i] = self->accumulator;
}
self->previousReset = reset;
}
void sig_dsp_Accumulate_destroy(struct sig_Allocator* allocator,
struct sig_dsp_Accumulate* self) {
sig_dsp_Signal_destroy(allocator, (void*) self);
}
struct sig_dsp_GatedTimer* sig_dsp_GatedTimer_new(
struct sig_Allocator* allocator,
struct sig_AudioSettings* settings,
struct sig_dsp_GatedTimer_Inputs* inputs) {
float_array_ptr output = sig_AudioBlock_new(allocator, settings);
struct sig_dsp_GatedTimer* self = sig_Allocator_malloc(allocator,
sizeof(struct sig_dsp_GatedTimer));
sig_dsp_GatedTimer_init(self, settings, inputs, output);
return self;
}
void sig_dsp_GatedTimer_init(struct sig_dsp_GatedTimer* self,
struct sig_AudioSettings* settings,
struct sig_dsp_GatedTimer_Inputs* inputs,
float_array_ptr output) {
sig_dsp_Signal_init(self, settings, output,
*sig_dsp_GatedTimer_generate);
self->inputs = inputs;
self->timer = 0;
self->hasFired = false;
self->prevGate = 0.0f;
}
// TODO: Unit tests
void sig_dsp_GatedTimer_generate(void* signal) {
struct sig_dsp_GatedTimer* self =
(struct sig_dsp_GatedTimer*) signal;
for (size_t i = 0; i < self->signal.audioSettings->blockSize; i++) {
// TODO: MSVC compiler warning loss of precision.
        unsigned long durationSamps = (unsigned long)
            (FLOAT_ARRAY(self->inputs->duration)[i] *
            self->signal.audioSettings->sampleRate);
float gate = FLOAT_ARRAY(self->inputs->gate)[i];
if (gate > 0.0f) {
// Gate is open.
if (!self->hasFired ||
FLOAT_ARRAY(self->inputs->loop)[i] > 0.0f) {
self->timer++;
}
if (self->timer >= durationSamps) {
// We reached the duration time.
FLOAT_ARRAY(self->signal.output)[i] = 1.0f;
// Reset the timer counter and note
// that we've already fired while
// this gate was open.
self->timer = 0;
self->hasFired = true;
continue;
}
} else if (gate <= 0.0f && self->prevGate > 0.0f) {
// Gate just closed. Reset all timer state.
self->timer = 0;
self->hasFired = false;
}
FLOAT_ARRAY(self->signal.output)[i] = 0.0f;
self->prevGate = gate;
}
}
void sig_dsp_GatedTimer_destroy(struct sig_Allocator* allocator,
struct sig_dsp_GatedTimer* self) {
sig_dsp_Signal_destroy(allocator, (void*) self);
}
struct sig_dsp_TimedTriggerCounter* sig_dsp_TimedTriggerCounter_new(
struct sig_Allocator* allocator,
struct sig_AudioSettings* settings,
struct sig_dsp_TimedTriggerCounter_Inputs* inputs) {
float_array_ptr output = sig_AudioBlock_new(allocator, settings);
struct sig_dsp_TimedTriggerCounter* self = sig_Allocator_malloc(
allocator,
sizeof(struct sig_dsp_TimedTriggerCounter));
sig_dsp_TimedTriggerCounter_init(self, settings, inputs, output);
return self;
}
void sig_dsp_TimedTriggerCounter_init(
struct sig_dsp_TimedTriggerCounter* self,
struct sig_AudioSettings* settings,
struct sig_dsp_TimedTriggerCounter_Inputs* inputs,
float_array_ptr output) {
sig_dsp_Signal_init(self, settings, output,
*sig_dsp_TimedTriggerCounter_generate);
self->inputs = inputs;
self->numTriggers = 0;
self->timer = 0;
self->isTimerActive = false;
self->previousSource = 0.0f;
}
void sig_dsp_TimedTriggerCounter_generate(void* signal) {
struct sig_dsp_TimedTriggerCounter* self =
(struct sig_dsp_TimedTriggerCounter*) signal;
for (size_t i = 0; i < self->signal.audioSettings->blockSize; i++) {
float source = FLOAT_ARRAY(self->inputs->source)[i];
float outputSample = 0.0f;
if (source > 0.0f && self->previousSource == 0.0f) {
// Received the rising edge of a trigger.
if (!self->isTimerActive) {
// It's the first trigger,
// so start the timer.
self->isTimerActive = true;
}
}
if (self->isTimerActive) {
// The timer is running.
if (source <= 0.0f && self->previousSource > 0.0f) {
// Received the falling edge of a trigger,
// so count it.
self->numTriggers++;
}
self->timer++;
            // Truncate the duration to whole samples.
long durSamps = (long) (FLOAT_ARRAY(
self->inputs->duration)[i] *
self->signal.audioSettings->sampleRate);
if (self->timer >= durSamps) {
// Time's up.
                // Fire a trigger if we've received the right number
                // of incoming triggers, otherwise just reset.
if (self->numTriggers ==
(int) FLOAT_ARRAY(self->inputs->count)[i]) {
outputSample = 1.0f;
}
self->isTimerActive = false;
self->numTriggers = 0;
self->timer = 0;
}
}
self->previousSource = source;
FLOAT_ARRAY(self->signal.output)[i] = outputSample;
}
}
void sig_dsp_TimedTriggerCounter_destroy(
struct sig_Allocator* allocator,
struct sig_dsp_TimedTriggerCounter* self) {
sig_dsp_Signal_destroy(allocator, (void*) self);
}
struct sig_dsp_ToggleGate* sig_dsp_ToggleGate_new(
struct sig_Allocator* allocator,
struct sig_AudioSettings* settings,
struct sig_dsp_ToggleGate_Inputs* inputs) {
float_array_ptr output = sig_AudioBlock_new(allocator, settings);
struct sig_dsp_ToggleGate* self = sig_Allocator_malloc(
allocator, sizeof(struct sig_dsp_ToggleGate));
sig_dsp_ToggleGate_init(self, settings, inputs, output);
return self;
}
void sig_dsp_ToggleGate_init(
struct sig_dsp_ToggleGate* self,
struct sig_AudioSettings* settings,
struct sig_dsp_ToggleGate_Inputs* inputs,
float_array_ptr output) {
sig_dsp_Signal_init(self, settings, output,
*sig_dsp_ToggleGate_generate);
self->inputs = inputs;
self->isGateOpen = false;
self->prevTrig = 0.0f;
}
// TODO: Unit tests
void sig_dsp_ToggleGate_generate(void* signal) {
struct sig_dsp_ToggleGate* self =
(struct sig_dsp_ToggleGate*) signal;
for (size_t i = 0; i < self->signal.audioSettings->blockSize; i++) {
float trigger = FLOAT_ARRAY(self->inputs->trigger)[i];
if (trigger > 0.0f && self->prevTrig <= 0.0f) {
// Received a trigger, toggle the gate.
self->isGateOpen = !self->isGateOpen;
}
FLOAT_ARRAY(self->signal.output)[i] = (float) self->isGateOpen;
self->prevTrig = trigger;
}
}
void sig_dsp_ToggleGate_destroy(
struct sig_Allocator* allocator,
struct sig_dsp_ToggleGate* self) {
sig_dsp_Signal_destroy(allocator, (void*) self);
}
void sig_dsp_Sine_init(struct sig_dsp_Sine* self,
struct sig_AudioSettings* settings,
struct sig_dsp_Sine_Inputs* inputs,
float_array_ptr output) {
sig_dsp_Signal_init(self, settings, output,
*sig_dsp_Sine_generate);
self->inputs = inputs;
self->phaseAccumulator = 0.0f;
}
struct sig_dsp_Sine* sig_dsp_Sine_new(struct sig_Allocator* allocator,
struct sig_AudioSettings* settings,
struct sig_dsp_Sine_Inputs* inputs) {
float_array_ptr output = sig_AudioBlock_new(allocator, settings);
struct sig_dsp_Sine* self = sig_Allocator_malloc(allocator,
sizeof(struct sig_dsp_Sine));
sig_dsp_Sine_init(self, settings, inputs, output);
return self;
}
void sig_dsp_Sine_destroy(struct sig_Allocator* allocator, struct sig_dsp_Sine* self) {
sig_dsp_Signal_destroy(allocator, (void*) self);
}
void sig_dsp_Sine_generate(void* signal) {
struct sig_dsp_Sine* self = (struct sig_dsp_Sine*) signal;
for (size_t i = 0; i < self->signal.audioSettings->blockSize; i++) {
float modulatedPhase = fmodf(self->phaseAccumulator +
FLOAT_ARRAY(self->inputs->phaseOffset)[i], sig_TWOPI);
FLOAT_ARRAY(self->signal.output)[i] = sinf(modulatedPhase) *
FLOAT_ARRAY(self->inputs->mul)[i] +
FLOAT_ARRAY(self->inputs->add)[i];
float phaseStep = FLOAT_ARRAY(self->inputs->freq)[i] /
self->signal.audioSettings->sampleRate * sig_TWOPI;
self->phaseAccumulator += phaseStep;
if (self->phaseAccumulator > sig_TWOPI) {
self->phaseAccumulator -= sig_TWOPI;
}
}
}
void sig_dsp_OnePole_init(struct sig_dsp_OnePole* self,
struct sig_AudioSettings* settings,
struct sig_dsp_OnePole_Inputs* inputs,
float_array_ptr output) {
sig_dsp_Signal_init(self, settings, output,
*sig_dsp_OnePole_generate);
self->inputs = inputs;
self->previousSample = 0.0f;
}
struct sig_dsp_OnePole* sig_dsp_OnePole_new(
struct sig_Allocator* allocator,
struct sig_AudioSettings* settings,
struct sig_dsp_OnePole_Inputs* inputs) {
float_array_ptr output = sig_AudioBlock_new(allocator, settings);
struct sig_dsp_OnePole* self = sig_Allocator_malloc(allocator,
sizeof(struct sig_dsp_OnePole));
sig_dsp_OnePole_init(self, settings, inputs, output);
return self;
}
// TODO: Unit tests
void sig_dsp_OnePole_generate(void* signal) {
struct sig_dsp_OnePole* self = (struct sig_dsp_OnePole*) signal;
float previousSample = self->previousSample;
for (size_t i = 0; i < self->signal.audioSettings->blockSize; i++) {
FLOAT_ARRAY(self->signal.output)[i] = previousSample =
sig_filter_onepole(
FLOAT_ARRAY(self->inputs->source)[i], previousSample,
FLOAT_ARRAY(self->inputs->coefficient)[i]);
}
self->previousSample = previousSample;
}
void sig_dsp_OnePole_destroy(struct sig_Allocator* allocator,
struct sig_dsp_OnePole* self) {
sig_dsp_Signal_destroy(allocator, (void*) self);
}
void sig_dsp_Tanh_init(struct sig_dsp_Tanh* self,
struct sig_AudioSettings* settings,
struct sig_dsp_Tanh_Inputs* inputs,
float_array_ptr output) {
sig_dsp_Signal_init(self, settings, output,
*sig_dsp_Tanh_generate);
self->inputs = inputs;
}
struct sig_dsp_Tanh* sig_dsp_Tanh_new(
struct sig_Allocator* allocator,
struct sig_AudioSettings* settings,
struct sig_dsp_Tanh_Inputs* inputs) {
float_array_ptr output = sig_AudioBlock_new(allocator, settings);
struct sig_dsp_Tanh* self = sig_Allocator_malloc(allocator,
sizeof(struct sig_dsp_Tanh));
sig_dsp_Tanh_init(self, settings, inputs, output);
return self;
}
// TODO: Unit tests.
void sig_dsp_Tanh_generate(void* signal) {
struct sig_dsp_Tanh* self = (struct sig_dsp_Tanh*) signal;
for (size_t i = 0; i < self->signal.audioSettings->blockSize; i++) {
float inSamp = FLOAT_ARRAY(self->inputs->source)[i];
float outSamp = tanhf(inSamp);
FLOAT_ARRAY(self->signal.output)[i] = outSamp;
}
}
void sig_dsp_Tanh_destroy(struct sig_Allocator* allocator,
struct sig_dsp_Tanh* self) {
sig_dsp_Signal_destroy(allocator, self);
}
void sig_dsp_Looper_init(struct sig_dsp_Looper* self,
struct sig_AudioSettings* settings,
struct sig_dsp_Looper_Inputs* inputs,
float_array_ptr output) {
sig_dsp_Signal_init(self, settings, output,
*sig_dsp_Looper_generate);
self->inputs = inputs;
self->isBufferEmpty = true;
    self->previousRecord = 0.0f;
    self->previousClear = 0.0f;
self->playbackPos = 0.0f;
// TODO: Deal with how buffers get here.
self->buffer = NULL;
}
struct sig_dsp_Looper* sig_dsp_Looper_new(
struct sig_Allocator* allocator,
struct sig_AudioSettings* settings,
struct sig_dsp_Looper_Inputs* inputs) {
float_array_ptr output = sig_AudioBlock_new(allocator, settings);
struct sig_dsp_Looper* self = sig_Allocator_malloc(allocator,
sizeof(struct sig_dsp_Looper));
sig_dsp_Looper_init(self, settings, inputs, output);
return self;
}
// TODO:
// * Reduce clicks by crossfading the end and start of the window.
// - should it be a true cross fade, requiring a few samples
// on each end of the clip, or a very quick fade in/out
// (e.g. 1-10ms/48-480 samples)?
// * Fade out before clearing. A whole loop's duration, or shorter?
// * Address glitches when the length is very short
// * Should we check if the buffer is null and output silence,
// or should this be considered a user error?
// (Or should we introduce some kind of validation function for signals?)
// * Unit tests
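// A possible shape for the crossfade mentioned in the first TODO above (sketch only;
// fadeLen, startSamp and endSamp are hypothetical locals that do not exist in the
// current implementation):
//   float t = (float) i / (float) fadeLen;               // 0..1 across the seam
//   float mixed = endSamp * (1.0f - t) + startSamp * t;  // linear crossfade
// An equal-power curve (cosine/sine weights) costs a little more CPU but usually
// hides the seam better than a linear ramp.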
void sig_dsp_Looper_generate(void* signal) {
struct sig_dsp_Looper* self = (struct sig_dsp_Looper*) signal;
float* samples = FLOAT_ARRAY(self->buffer->samples);
float playbackPos = self->playbackPos;
float bufferLastIdx = (float)(self->buffer->length - 1);
for (size_t i = 0; i < self->signal.audioSettings->blockSize; i++) {
float speed = FLOAT_ARRAY(self->inputs->speed)[i];
float start = sig_clamp(FLOAT_ARRAY(self->inputs->start)[i],
0.0, 1.0);
float end = sig_clamp(FLOAT_ARRAY(self->inputs->end)[i],
0.0, 1.0);
// Flip the start and end points if they're reversed.
if (start > end) {
float temp = start;
start = end;
end = temp;
}
float startPos = roundf(bufferLastIdx * start);
float endPos = roundf(bufferLastIdx * end);
// If the loop size is smaller than the speed
// we're playing back at, just output silence.
if ((endPos - startPos) <= fabsf(speed)) {
FLOAT_ARRAY(self->signal.output)[i] = 0.0f;
continue;
}
if (FLOAT_ARRAY(self->inputs->record)[i] > 0.0f) {
// We're recording.
if (self->previousRecord <= 0.0f) {
// We've just started recording.
if (self->isBufferEmpty) {
// This is the first overdub.
playbackPos = startPos;
}
}
// Playback has to be at regular speed
// while recording, so ignore any modulation
// and only use its direction.
// TODO: Add support for cool tape-style effects
// when overdubbing at different speeds.
// Note: Naively omitting this will work,
            // but introduces lots of pitched resampling artifacts.
speed = speed > 0.0f ? 1.0f : -1.0f;
// Overdub the current audio input into the loop buffer.
size_t playbackIdx = (size_t) playbackPos;
float sample = samples[playbackIdx] +
FLOAT_ARRAY(self->inputs->source)[i];
// Add a little distortion/limiting.
sample = tanhf(sample);
samples[playbackIdx] = sample;
// No interpolation is needed because we're
// playing/recording at regular speed.
FLOAT_ARRAY(self->signal.output)[i] = sample;
} else {
// We're playing back.
if (self->previousRecord > 0.0f) {
// We just finished recording.
self->isBufferEmpty = false;
}
if (FLOAT_ARRAY(self->inputs->clear)[i] > 0.0f &&
self->previousClear == 0.0f) {
// TODO: Fade out before clearing the buffer
// (gh-28)
sig_Buffer_fillWithSilence(self->buffer);
self->isBufferEmpty = true;
}
// TODO: The sig_interpolate_linear implementation
// may wrap around inappropriately to the beginning of
// the buffer (not to the startPos) if we're right at
// the end of the buffer.
FLOAT_ARRAY(self->signal.output)[i] = sig_interpolate_linear(
playbackPos, samples, self->buffer->length) +
FLOAT_ARRAY(self->inputs->source)[i];
}
playbackPos += speed;
if (playbackPos > endPos) {
playbackPos = startPos + (playbackPos - endPos);
} else if (playbackPos < startPos) {
playbackPos = endPos - (startPos - playbackPos);
}
self->previousRecord = FLOAT_ARRAY(self->inputs->record)[i];
self->previousClear = FLOAT_ARRAY(self->inputs->clear)[i];
}
self->playbackPos = playbackPos;
}
void sig_dsp_Looper_destroy(struct sig_Allocator* allocator,
struct sig_dsp_Looper* self) {
sig_dsp_Signal_destroy(allocator, self);
}
void sig_dsp_Dust_init(struct sig_dsp_Dust* self,
struct sig_AudioSettings* settings,
struct sig_dsp_Dust_Inputs* inputs,
float_array_ptr output) {
sig_dsp_Signal_init(self, settings, output,
*sig_dsp_Dust_generate);
struct sig_dsp_Dust_Parameters parameters = {
.bipolar = 0.0
};
self->inputs = inputs;
self->parameters = parameters;
self->sampleDuration = 1.0 / settings->sampleRate;
self->previousDensity = 0.0;
self->threshold = 0.0;
}
struct sig_dsp_Dust* sig_dsp_Dust_new(
struct sig_Allocator* allocator,
struct sig_AudioSettings* settings,
struct sig_dsp_Dust_Inputs* inputs) {
float_array_ptr output = sig_AudioBlock_new(allocator, settings);
struct sig_dsp_Dust* self = sig_Allocator_malloc(allocator,
sizeof(struct sig_dsp_Dust));
sig_dsp_Dust_init(self, settings, inputs, output);
return self;
}
void sig_dsp_Dust_generate(void* signal) {
struct sig_dsp_Dust* self = (struct sig_dsp_Dust*) signal;
float scaleDiv = self->parameters.bipolar > 0.0f ? 2.0f : 1.0f;
float scaleSub = self->parameters.bipolar > 0.0f ? 1.0f : 0.0f;
for (size_t i = 0; i < self->signal.audioSettings->blockSize; i++) {
float density = FLOAT_ARRAY(self->inputs->density)[i];
if (density != self->previousDensity) {
self->previousDensity = density;
self->threshold = density * self->sampleDuration;
self->scale = self->threshold > 0.0f ?
scaleDiv / self->threshold : 0.0f;
}
float rand = sig_randf();
float val = rand < self->threshold ?
rand * self->scale - scaleSub : 0.0f;
FLOAT_ARRAY(self->signal.output)[i] = val;
}
}
void sig_dsp_Dust_destroy(struct sig_Allocator* allocator,
struct sig_dsp_Dust* self) {
sig_dsp_Signal_destroy(allocator, self);
}
void sig_dsp_TimedGate_init(struct sig_dsp_TimedGate* self,
struct sig_AudioSettings* settings,
struct sig_dsp_TimedGate_Inputs* inputs,
float_array_ptr output) {
sig_dsp_Signal_init(self, settings, output,
*sig_dsp_TimedGate_generate);
struct sig_dsp_TimedGate_Parameters parameters = {
.resetOnTrigger = 0.0,
.bipolar = 0.0
};
self->inputs = inputs;
self->parameters = parameters;
self->previousTrigger = 0.0f;
self->previousDuration = 0.0f;
self->gateValue = 0.0f;
self->durationSamps = 0;
self->samplesRemaining = 0;
}
struct sig_dsp_TimedGate* sig_dsp_TimedGate_new(
struct sig_Allocator* allocator,
struct sig_AudioSettings* settings,
struct sig_dsp_TimedGate_Inputs* inputs) {
float_array_ptr output = sig_AudioBlock_new(allocator, settings);
struct sig_dsp_TimedGate* self = sig_Allocator_malloc(allocator,
sizeof(struct sig_dsp_TimedGate));
sig_dsp_TimedGate_init(self, settings, inputs, output);
return self;
}
static inline void sig_dsp_TimedGate_outputHigh(struct sig_dsp_TimedGate* self,
size_t index) {
FLOAT_ARRAY(self->signal.output)[index] = self->gateValue;
self->samplesRemaining--;
}
static inline void sig_dsp_TimedGate_outputLow(struct sig_dsp_TimedGate* self,
size_t index) {
FLOAT_ARRAY(self->signal.output)[index] = 0.0f;
}
void sig_dsp_TimedGate_generate(void* signal) {
struct sig_dsp_TimedGate* self = (struct sig_dsp_TimedGate*)
signal;
for (size_t i = 0; i < self->signal.audioSettings->blockSize; i++) {
float currentTrigger = FLOAT_ARRAY(self->inputs->trigger)[i];
float duration = FLOAT_ARRAY(self->inputs->duration)[i];
if ((currentTrigger > 0.0f && self->previousTrigger <= 0.0f) ||
(self->parameters.bipolar > 0.0 && currentTrigger < 0.0f && self->previousTrigger >= 0.0f)) {
// A new trigger was received.
self->gateValue = currentTrigger;
if (duration != self->previousDuration) {
// The duration input has changed.
self->durationSamps = lroundf(duration *
self->signal.audioSettings->sampleRate);
self->previousDuration = duration;
}
if (self->parameters.resetOnTrigger > 0.0f &&
self->samplesRemaining > 0) {
// Gate is open and needs to be reset.
// Close the gate for one sample,
// and don't count down the duration
// until next time.
sig_dsp_TimedGate_outputLow(self, i);
self->samplesRemaining = self->durationSamps;
} else {
self->samplesRemaining = self->durationSamps;
sig_dsp_TimedGate_outputHigh(self, i);
}
} else if (self->samplesRemaining > 0) {
sig_dsp_TimedGate_outputHigh(self, i);
} else {
sig_dsp_TimedGate_outputLow(self, i);
}
self->previousTrigger = currentTrigger;
}
}
void sig_dsp_TimedGate_destroy(struct sig_Allocator* allocator,
struct sig_dsp_TimedGate* self) {
sig_dsp_Signal_destroy(allocator, self);
}
void sig_dsp_ClockFreqDetector_init(
struct sig_dsp_ClockFreqDetector* self,
struct sig_AudioSettings* settings,
struct sig_dsp_ClockFreqDetector_Inputs* inputs,
float_array_ptr output) {
sig_dsp_Signal_init(self, settings, output,
*sig_dsp_ClockFreqDetector_generate);
struct sig_dsp_ClockFreqDetector_Parameters params = {
.threshold = 0.1f,
.timeoutDuration = 120.0f
};
self->inputs = inputs;
self->parameters = params;
self->previousTrigger = 0.0f;
self->samplesSinceLastPulse = 0;
self->clockFreq = 0.0f;
self->pulseDurSamples = 0;
}
struct sig_dsp_ClockFreqDetector* sig_dsp_ClockFreqDetector_new(
struct sig_Allocator* allocator,
struct sig_AudioSettings* settings,
struct sig_dsp_ClockFreqDetector_Inputs* inputs) {
float_array_ptr output = sig_AudioBlock_new(allocator, settings);
struct sig_dsp_ClockFreqDetector* self =
sig_Allocator_malloc(allocator,
sizeof(struct sig_dsp_ClockFreqDetector));
sig_dsp_ClockFreqDetector_init(self, settings, inputs, output);
return self;
}
static inline float sig_dsp_ClockFreqDetector_calcClockFreq(
float sampleRate, uint32_t samplesSinceLastPulse,
float prevFreq) {
float freq = sampleRate / (float) samplesSinceLastPulse;
// TODO: Is an LPF good, or is a moving average better?
return sig_filter_onepole(freq, prevFreq, 0.01f);
}
void sig_dsp_ClockFreqDetector_generate(void* signal) {
struct sig_dsp_ClockFreqDetector* self =
(struct sig_dsp_ClockFreqDetector*) signal;
float_array_ptr source = self->inputs->source;
float_array_ptr output = self->signal.output;
float previousTrigger = self->previousTrigger;
float clockFreq = self->clockFreq;
bool isRisingEdge = self->isRisingEdge;
uint32_t samplesSinceLastPulse = self->samplesSinceLastPulse;
float sampleRate = self->signal.audioSettings->sampleRate;
float threshold = self->parameters.threshold;
float timeoutDuration = self->parameters.timeoutDuration;
uint32_t pulseDurSamples = self->pulseDurSamples;
for (size_t i = 0; i < self->signal.audioSettings->blockSize; i++) {
samplesSinceLastPulse++;
float sourceSamp = FLOAT_ARRAY(source)[i];
if (sourceSamp > 0.0f && previousTrigger <= 0.0f) {
// Start of rising edge.
isRisingEdge = true;
} else if (sourceSamp < previousTrigger) {
// Failed to reach the threshold before
// the signal fell again.
isRisingEdge = false;
}
if (isRisingEdge && sourceSamp >= threshold) {
// Signal is rising and threshold has been reached,
// so this is a pulse.
clockFreq = sig_dsp_ClockFreqDetector_calcClockFreq(
sampleRate, samplesSinceLastPulse, clockFreq);
pulseDurSamples = samplesSinceLastPulse;
samplesSinceLastPulse = 0;
isRisingEdge = false;
} else if (samplesSinceLastPulse > sampleRate * timeoutDuration) {
// It's been too long since we've received a pulse.
clockFreq = 0.0f;
} else if (samplesSinceLastPulse > pulseDurSamples) {
// Tempo is slowing down; recalculate it.
clockFreq = sig_dsp_ClockFreqDetector_calcClockFreq(
sampleRate, samplesSinceLastPulse, clockFreq);
}
FLOAT_ARRAY(output)[i] = clockFreq;
previousTrigger = sourceSamp;
}
self->previousTrigger = previousTrigger;
self->clockFreq = clockFreq;
self->isRisingEdge = isRisingEdge;
self->samplesSinceLastPulse = samplesSinceLastPulse;
self->pulseDurSamples = pulseDurSamples;
}
void sig_dsp_ClockFreqDetector_destroy(struct sig_Allocator* allocator,
struct sig_dsp_ClockFreqDetector* self) {
sig_dsp_Signal_destroy(allocator, self);
}
-- Source file: dataseries/init.lua (Lua), from the LuaDist-testing/torch-dataframe repository (MIT license)
-- Main Dataseries file
require 'torch'
local argcheck = require "argcheck"
local doc = require "argcheck.doc"
doc[[
## Dataseries
The Dataseries is an array of data with an additional layer
of missing data info. The class contains two main elements:
* A data container
* A hash with the missing data positions
The missing data are presented as `nan` values. A `nan` has the
behavior that `nan ~= nan` evaluates to `true`. There is a helper
function in the package, `isnan()`, that can be used to identify
`nan` values.
The class has the following metatable functions available:
* `__index__`: You can access any element by `[]`
* `__newindex__`: You can set the value of an element via `[]`
* `__len__`: The `#` returns the length of the series
]]
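-- A minimal usage sketch (illustration only; it assumes the torch-dataframe package
-- is installed so that `Dataseries`, `Df_Array` and the `isnan` helper are in scope,
-- and the values below are made up):
--
--   local ages = Dataseries(Df_Array({24, 0/0, 31})) -- 0/0 produces nan, i.e. missing
--   print(#ages)           -- 3, via the __len__ metatable function
--   print(isnan(ages[2]))  -- true; note that ages[2] ~= ages[2]
--   ages[2] = 27           -- replace the missing value via __newindex__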
-- create class object
local Dataseries, parent_class = torch.class('Dataseries', 'tnt.Dataset')
Dataseries.__init = argcheck{
doc = [[
<a name="Dataseries.__init">
### Dataseries.__init(@ARGP)
Creates and initializes an empty Dataseries. Invoked through `local my_series = Dataseries()`.
The type can be:
- boolean
- integer
- double
- string
- torch tensor or tds.Vec
@ARGT
]],
{name="self", type="Dataseries"},
{name="type", type="string", doc="The type of data storage to init.", default="string"},
call=function(self, type)
parent_class.__init(self)
self.data = self.new_storage(0, type)
self.missing = tds.Hash()
self._variable_type = type
end}
Dataseries.__init = argcheck{
doc = [[
### Dataseries.__init(@ARGP)
Creates and initializes a Dataseries of a given size. Invoked through `local my_series = Dataseries(10)`.
The type can be:
- boolean
- integer
- double
- string
- torch tensor or tds.Vec
@ARGT
]],
overload=Dataseries.__init,
{name="self", type="Dataseries"},
{name="size", type="number", doc="The size of the new series"},
{name="type", type="string", doc="The type of data storage to init.", opt=true},
call=function(self, size, type)
assert(isint(size) and size >= 0, "Size has to be a positive integer")
parent_class.__init(self)
self.data = self.new_storage(size, type)
self.missing = tds.Hash()
self._variable_type = type
end}
Dataseries.__init = argcheck{
doc = [[
### Dataseries.__init(@ARGP)
Creates and initializes a Dataseries with a given Tensor or Vector. Invoked through `local my_series = Dataseries(myData)`.
The data can be a torch tensor or a tds.Vec.
@ARGT
]],
{name="self", type="Dataseries"},
{name="data", type="torch.*Tensor|tds.Vec"},
overload=Dataseries.__init,
call=function(self, data)
local size
local thname = torch.type(data)
if (thname:match("^tds")) then
size = #data
else
size = data:size(1)
end
-- Create the basic datastructures
self:__init(size, thname)
-- Copy values
for i=1,size do
self:set(i, data[i])
end
end}
Dataseries.__init = argcheck{
doc = [[
### Dataseries.__init(@ARGP)
Creates and initializes a Dataseries with a given Df_Array. Invoked through `local my_series = Dataseries(Df_Array(myTable))`.
@ARGT
]],
{name="self", type="Dataseries"},
{name="data", type="Df_Array"},
{name="max_elmnts4type", type="number",
doc="The maximum number of elements to traverse before settling a type",
default=1e3},
overload=Dataseries.__init,
call=function(self, data, max_elmnts4type)
data = data.data
max_elmnts4type = math.min(#data, max_elmnts4type)
local type = nil
for i=1,max_elmnts4type do
type = get_variable_type{value = data[i], prev_type = type}
end
-- Create the basic datastructures
self:__init(#data, type)
-- Copy values
for i=1,#data do
self:set(i, data[i])
end
end}
Dataseries.load = argcheck{
doc=[[
<a name="Dataseries.load">
### Dataseries.load(@ARGP)
Load a Tensor or tds.Vec without checking type or missing values.
@ARGT
_Return value_: self
]],
{name="self", type="Dataseries"},
{name="data", type="torch.*Tensor|tds.Vec", doc="data to load"},
call=function(self, data)
self.data = data
self.missing = tds.Hash()
self._variable_type = torch.type(self.data)
return self
end}
Dataseries.new_storage = argcheck{
doc = [[
<a name="Dataseries.new_storage">
### Dataseries.new_storage(@ARGP)
Internal method to retrieve a storage element for the Dataseries. The type can be:
- boolean
- integer
- double
- string
- torch tensor or tds.Vec
@ARGT
]],
{name="size", type="number", doc="The size of the storage"},
{name="type", type="string", doc="The type of data storage to initialize", default="string"},
call = function(size, type)
if (type == "integer") then
return torch.IntTensor(size)
end
if (type == "long") then
return torch.LongTensor(size)
end
if (type == "double") then
return torch.DoubleTensor(size)
end
if (type == "boolean" or
type == "string" or
type == "tds.Vec" or
type == nil) then
local data = tds.Vec()
if (size > 0) then
data:resize(size)
end
return data
end
if (type:match("torch.*Tensor")) then
return torch.Tensor(size):type(type)
end
assert(false, ("The type '%s' has not yet been implemented"):format(type))
end}
Dataseries.copy = argcheck{
doc=[[
<a name="Dataseries.copy">
### Dataseries.copy(@ARGP)
Creates a new Dataseries with a copy/clone of the current data
@ARGT
_Return value_: Dataseries
]],
{name="self", type="Dataseries"},
{name="type", type="string", opt=true,
doc="Specify type if you want other type than the current"},
call=function(self, type)
type = type or self:get_variable_type()
local ret = Dataseries.new(#self, type)
for i=1,#self do
ret:set(i, self:get(i))
end
return ret
end}
-- Function that copies another dataset into the current together with all the
-- metadata
Dataseries._replace_data = argcheck{
{name="self", type="Dataseries"},
{name="new_data", type="Dataseries"},
call=function(self, new_data)
assert(self:size() == new_data:size(), "Can't replace when of different size")
for k,val in pairs(new_data) do
self[k] = val
end
return self
end}
Dataseries.size = argcheck{
doc=[[
<a name="Dataseries.size">
### Dataseries.size(@ARGP)
Returns the number of elements in the Dataseries
@ARGT
_Return value_: number
]],
{name="self", type="Dataseries"},
call=function(self)
if (self:is_tensor()) then
return self.data:nElement()
else
return #self.data
end
end}
Dataseries.resize = argcheck{
doc=[[
<a name="Dataseries.resize">
### Dataseries.resize(@ARGP)
Resizes the underlying storage to the new size. If the size is shrunk
then it also clears any missing values in the hash. If the size is increased
the new values are automatically set to missing.
@ARGT
_Return value_: self
]],
{name="self", type="Dataseries"},
{name="new_size", type="number", doc="The new size for the series"},
call=function(self, new_size)
local current_size = self:size()
if (current_size < new_size) then
self.data:resize(new_size)
for i = (current_size + 1), new_size do
self.missing[i] = true
end
elseif(current_size > new_size) then
self.data:resize(new_size)
for i = (new_size + 1),current_size do
self.missing[i] = nil
end
end
return self
end}
Dataseries.assert_is_index = argcheck{
doc=[[
<a name="Dataseries.assert_is_index">
### Dataseries.assert_is_index(@ARGP)
Assertion that checks if index is an integer and within the span of the series
@ARGT
_Return value_: self
]],
{name="self", type="Dataseries"},
{name="index", type="number", doc="The index to check"},
{name = "plus_one", type = "boolean", default = false,
doc= "Count next non-existing index as good. When adding rows, an index of size(1) + 1 is OK"},
call = function(self, index, plus_one)
if (plus_one) then
if (not isint(index) or
index < 0 or
index > self:size() + 1) then
assert(false, ("The index has to be an integer between 1 and %d - you've provided %s"):
format(self:size() + 1, index))
end
else
if (not isint(index) or
index < 0 or
index > self:size()) then
assert(false, ("The index has to be an integer between 1 and %d - you've provided %s"):
format(self:size(), index))
end
end
return true
end}
Dataseries.is_tensor = argcheck{
doc = [[
<a name="Dataseries.is_tensor">
### Dataseries.is_tensor(@ARGP)
Checks if tensor
@ARGT
_Return value_: boolean
]],
{name="self", type="Dataseries"},
call=function(self)
if (torch.type(self.data):match(("torch.*Tensor"))) then
return true
else
return false
end
end}
Dataseries.is_numerical = argcheck{
doc = [[
<a name="Dataseries.is_numerical">
### Dataseries.is_numerical(@ARGP)
Checks if numerical
@ARGT
_Return value_: boolean
]],
{name="self", type="Dataseries"},
call=function(self)
return self:get_variable_type() == "integer" or
self:get_variable_type() == "long" or
self:get_variable_type() == "double"
end}
Dataseries.is_boolean = argcheck{
doc = [[
<a name="Dataseries.is_boolean">
### Dataseries.is_boolean(@ARGP)
Checks if boolean
@ARGT
_Return value_: boolean
]],
{name="self", type="Dataseries"},
call=function(self)
return self:get_variable_type() == "boolean"
end}
Dataseries.is_string = argcheck{
doc = [[
<a name="Dataseries.is_string">
### Dataseries.is_string(@ARGP)
Checks if string
@ARGT
_Return value_: boolean
]],
{name="self", type="Dataseries"},
call=function(self)
return self:get_variable_type() == "string"
end}
Dataseries.type = argcheck{
doc=[[
<a name="Dataseries.type">
### Dataseries.type(@ARGP)
Gets the torch.typename of the storage
@ARGT
_Return value_: string
]],
{name="self", type="Dataseries"},
call=function(self)
return torch.typename(self.data)
end}
-- TODO : Change method name to something more explicit to avoid confusion between
-- getting type and changing type (information VS action).
-- name proposition : astype (inspired from pandas)
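-- For illustration, the two call forms currently look like this (sketch only):
--   local storage_type = series:type()    -- information: returns the torch.typename
--   series:type("torch.DoubleTensor")     -- action: converts the column in place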
Dataseries.type = argcheck{
doc=[[
You can also set the type by calling type with a type argument
@ARGT
_Return value_: self
]],
{name="self", type="Dataseries"},
{name="type", type="string", doc="The type of column that you want to convert to"},
overload=Dataseries.type,
call=function(self, type)
local new_data = self:copy(type)
self:_replace_data(new_data)
return self
end}
Dataseries.get_variable_type = argcheck{
doc=[[
<a name="Dataseries.get_variable_type">
### Dataseries.get_variable_type(@ARGP)
Gets the variable type that was used to initiate the Dataseries
@ARGT
_Return value_: string
]],
{name="self", type="Dataseries"},
call=function(self)
return self._variable_type
end}
Dataseries.boolean2tensor = argcheck{
doc = [[
<a name="Dataseries.boolean2tensor">
### Dataseries.boolean2tensor(@ARGP)
Converts a boolean Dataseries into a torch.ByteTensor
@ARGT
_Return value_: self, boolean indicating successful conversion
]],
{name="self", type="Dataseries"},
{name="false_value", type="number",
doc="The numeric value for false"},
{name="true_value", type="number",
doc="The numeric value for true"},
call=function(self, false_value, true_value)
if (not self:is_boolean()) then
warning("The series isn't a boolean")
return self, false
end
-- Create a ByteTensor with the same size as the current dataseries and
-- fill it with false values
local data = torch.ByteTensor(self:size()):fill(false_value)
for i=1,self:size() do
local val = self:get(i)
if (not isnan(val)) then
if (val) then
data[i] = true_value
end
end
end
self.data = data
self._variable_type = "integer"
return self, true
end}
Dataseries.fill = argcheck{
doc = [[
<a name="Dataseries.fill">
### Dataseries.fill(@ARGP)
Fills all values with a default value
@ARGT
_Return value_: self
]],
{name="self", type="Dataseries"},
{name="default_value", type="number|string|boolean",
doc="The default value"},
call=function(self, default_value)
if (self:is_tensor()) then
self.data:fill(default_value)
else
for i=1,self:size() do
self:set(i, default_value)
end
end
return self
end}
Dataseries.fill_na = argcheck{
doc = [[
<a name="Dataseries.fill_na">
### Dataseries.fill_na(@ARGP)
Replace missing values with a specific value
@ARGT
_Return value_: self
]],
{name="self", type="Dataseries"},
{name="default_value", type="number|string|boolean",
doc="The default missing value", default=0},
call=function(self, default_value)
if (self:count_na() == 0) then
return self
end
if (self:is_categorical() and
not self:has_cat_key("__nan__")) then
assert(isint(default_value), "The default value has to be an integer")
assert(not self:has_cat_value(default_value),
"The value " .. default_value .. " is already present in the Dataseries")
self:add_cat_key("__nan__", default_value)
default_value = "__nan__"
end
if (self:is_tensor()) then
-- Get the mask differentiating values/missing_values
local mask = self:get_data_mask{missing = true}
-- Use this mask to only replace missing values
self.data:maskedFill(mask, default_value)
-- Reset missing values list
self.missing = tds.Hash()
else
-- Browse row by row
for pos,_ in pairs(self.missing) do
self:set(pos, default_value)
end
-- Here no need to reset missing values list, it is handled in `set()` method
end
return self
end}
Dataseries.tostring = argcheck{
doc = [[
<a name="Dataseries.tostring">
### Dataseries.tostring(@ARGP)
Converts the series into a string output
@ARGT
_Return value_: string
]],
{name="self", type="Dataseries"},
{name="max_elmnts", type="number", doc="Number of elements to convert",
default=20},
call=function(self, max_elmnts)
max_elmnts = math.min(self:size(), max_elmnts)
		local ret = ("Type: %s (%s)\nLength: %d\n-----"):
format(self:get_variable_type(), self:type(), self:size())
for i=1,max_elmnts do
ret = ret .. "\n" .. tostring(self:get(i))
end
if (max_elmnts < self:size()) then
ret = ret .. "\n..."
end
ret = ret .. "\n-----\n"
return ret
end}
-- TODO : use same logic as bulk_load_csv to extract a subset
Dataseries.sub = argcheck{
doc = [[
<a name="Dataseries.sub">
### Dataseries.sub(@ARGP)
Subsets the Dataseries to the element span
@ARGT
_Return value_: Dataseries
]],
{name="self", type="Dataseries"},
{name="start", type="number", default=1},
{name="stop", type="number", opt=true},
call=function(self, start, stop)
stop = stop or self:size()
assert(start <= stop,
("Start larger than stop, i.e. %d > %d"):format(start, stop))
self:assert_is_index(start)
self:assert_is_index(stop)
local ret = Dataseries.new(stop - start + 1, self:get_variable_type())
for idx = start,stop do
ret:set(idx + 1 - start, self:get(idx))
end
return ret
end}
Dataseries.eq = argcheck{
doc = [[
<a name="Dataseries.eq">
### Dataseries.eq(@ARGP)
Compares the series to another Dataseries or table in order to see if they are identical
@ARGT
_Return value_: boolean
]],
{name="self", type="Dataseries"},
{name="other", type="Dataseries|table"},
call=function(self, other)
if (self:size() ~= #other) then
return false
end
for i=1,self:size() do
if (self:get(i) ~= other[i]) then
return false
end
end
return true
end}
Dataseries.get_data_mask = argcheck{
doc=[[
<a name="Dataseries.get_data_mask">
### Dataseries.get_data_mask(@ARGP)
Retrieves a mask that can be used to select missing or active values
@ARGT
_Return value_: torch.ByteTensor
]],
{name="self", type="Dataseries"},
{name="missing", type="boolean", default=false,
doc="Set to true if you want only the missing values"},
call=function(self, missing)
local fill_value = 1
local missing_value = 0
if (missing) then
fill_value = 0
missing_value = 1
end
-- Create a ByteTensor with the same size as the current dataseries and
-- fill it with defined filling value
local mask = torch.ByteTensor():resize(self:size()):fill(fill_value)
for i,_ in pairs(self.missing) do
mask[i] = missing_value
end
return mask
end}
return Dataseries
| 22.016713 | 126 | 0.700025 | 3.25 |
7ab38b9c934e0c8138b2c54e7fc5fa13dfb45949 | 2,384 | rs | Rust | src/days/day12.rs | fxwiegand/advent-of-code-2021 | c164630bb2567585ff3e8574892eb67e4d777243 | [
"MIT"
] | null | null | null | src/days/day12.rs | fxwiegand/advent-of-code-2021 | c164630bb2567585ff3e8574892eb67e4d777243 | [
"MIT"
] | null | null | null | src/days/day12.rs | fxwiegand/advent-of-code-2021 | c164630bb2567585ff3e8574892eb67e4d777243 | [
"MIT"
] | 1 | 2021-12-01T17:47:04.000Z | 2021-12-01T17:47:04.000Z | use std::collections::HashMap;
pub(crate) fn solve_day12() -> u32 {
let input = include_str!("../puzzles/day12.txt");
let mut map = HashMap::new();
for line in input.lines() {
let (cave, cave2) = line.split_once('-').unwrap();
let entry = map.entry(cave.to_owned()).or_insert_with(Vec::new);
entry.push(cave2.to_string());
let entry2 = map.entry(cave2.to_owned()).or_insert_with(Vec::new);
entry2.push(cave.to_string());
}
get_paths("start".to_string(), &map, vec!["start".to_string()])
}
fn get_paths(cave: String, map: &HashMap<String, Vec<String>>, visited: Vec<String>) -> u32 {
if cave == "end" {
return 1;
}
let mut sum = 0;
for neighbour in map.get(&cave).unwrap() {
if !(visited.contains(neighbour) && &neighbour.to_lowercase() == neighbour) {
let mut seen = visited.clone();
seen.push(neighbour.to_string());
sum += get_paths(neighbour.to_string(), &map.clone(), seen)
}
}
sum
}
fn get_paths2(
cave: String,
map: &HashMap<String, Vec<String>>,
visited: Vec<String>,
twice: bool,
) -> u32 {
if cave == "end" {
return 1;
}
let mut sum = 0;
for neighbour in map.get(&cave).unwrap() {
if neighbour != "start" {
if !visited.contains(neighbour) || &neighbour.to_lowercase() != neighbour {
let mut seen = visited.clone();
seen.push(neighbour.to_string());
sum += get_paths2(neighbour.to_string(), &map.clone(), seen.clone(), twice);
} else if !twice {
let mut seen = visited.clone();
seen.push(neighbour.to_string());
sum += get_paths2(neighbour.to_string(), &map.clone(), seen.clone(), true);
}
}
}
sum
}
pub(crate) fn solve_day12_part2() -> u32 {
let input = include_str!("../puzzles/day12.txt");
let mut map = HashMap::new();
for line in input.lines() {
let (cave, cave2) = line.split_once('-').unwrap();
let entry = map.entry(cave.to_owned()).or_insert_with(Vec::new);
entry.push(cave2.to_string());
let entry2 = map.entry(cave2.to_owned()).or_insert_with(Vec::new);
entry2.push(cave.to_string());
}
get_paths2("start".to_string(), &map, vec!["start".to_string()], false)
}
| 34.550725 | 93 | 0.567953 | 3.171875 |
9c2bef9eec101c417b13e58d3bcb3bee40a4d3ba | 1,371 | js | JavaScript | src/js/04-fav.js | Adalab/modulo-2-evaluacion-final-LuciaRoNova3005 | b22f782a732c56189ab231e2d327fbc0fb661ecf | [
"MIT"
] | null | null | null | src/js/04-fav.js | Adalab/modulo-2-evaluacion-final-LuciaRoNova3005 | b22f782a732c56189ab231e2d327fbc0fb661ecf | [
"MIT"
] | null | null | null | src/js/04-fav.js | Adalab/modulo-2-evaluacion-final-LuciaRoNova3005 | b22f782a732c56189ab231e2d327fbc0fb661ecf | [
"MIT"
] | null | null | null | //Si el array que guarda la informacion del local tiene contenido me ejecutas Recupero los datos y lo pinto
//Funcion que escucha click en las peliculas y en las peliculas de favoritas con la js-shows//
function addListenShow() {
const cardShows = document.querySelectorAll(".js-shows");
for (const card of cardShows) {
card.addEventListener("click", handleClickFav);
}
}
function handleClickFav(event) {
// Identificar la li pulsada
const selectCardFav = event.currentTarget;
// Obtener la información asociada a la serie
const filmId = parseInt(selectCardFav.dataset.id);
//Buscamos si elemento clicado esta en nuestro array de favoritos
const idExist = arrayFavorite.find(
(favoritedata) => favoritedata.show.id === filmId
);
if (idExist === undefined) {
// El ID del array en el que se ha hecho click no está en el array de favoritos lo añade
const Objseriedata = arrayShows.find(
(seriedata) => seriedata.show.id === filmId
);
arrayFavorite.push(Objseriedata);
// El ID del array en el que se ha hecho click esta en el array de favoritos hace un filtro para eliminarlo
} else {
arrayFavorite = arrayFavorite.filter((fav) => fav.show.id !== filmId);
}
// Pinta las tarjetas en favoritas y las guarda en local
renderFavorites();
renderShows();
savedFav();
}
| 38.083333 | 112 | 0.699489 | 3.46875 |
4a3594a8bd9b3628a0f6f39f6b4e27c03f08a727 | 1,893 | sql | SQL | src/BugNET.Database/Stored Procedures/BugNet_IssueNotification_GetIssueNotificationsByIssueId.sql | thaihoc2/bugnet | 440bfd93d231c95acf20cf67d99a222d35bd7a4f | [
"MS-PL"
] | 200 | 2015-01-07T11:32:41.000Z | 2021-08-13T00:42:12.000Z | src/BugNET.Database/Stored Procedures/BugNet_IssueNotification_GetIssueNotificationsByIssueId.sql | China-HD/bugnet | cfab4aeacc7224425db86f0e1f94cf24e22acf2a | [
"MS-PL"
] | 203 | 2015-01-02T14:49:30.000Z | 2019-01-17T02:36:42.000Z | src/BugNET.Database/Stored Procedures/BugNet_IssueNotification_GetIssueNotificationsByIssueId.sql | China-HD/bugnet | cfab4aeacc7224425db86f0e1f94cf24e22acf2a | [
"MS-PL"
] | 236 | 2015-01-09T22:44:16.000Z | 2022-03-22T11:10:16.000Z |
CREATE PROCEDURE [dbo].[BugNet_IssueNotification_GetIssueNotificationsByIssueId]
@IssueId Int
AS
SET NOCOUNT ON
DECLARE @DefaultCulture NVARCHAR(50)
SET @DefaultCulture = (SELECT ISNULL(SettingValue, 'en-US') FROM BugNet_HostSettings WHERE SettingName = 'ApplicationDefaultLanguage')
DECLARE @tmpTable TABLE (IssueNotificationId int, IssueId int,NotificationUserId uniqueidentifier, NotificationUserName nvarchar(50), NotificationDisplayName nvarchar(50), NotificationEmail nvarchar(50), NotificationCulture NVARCHAR(50))
INSERT @tmpTable
SELECT
IssueNotificationId,
IssueId,
U.UserId NotificationUserId,
U.UserName NotificationUserName,
IsNull(DisplayName,'') NotificationDisplayName,
M.Email NotificationEmail,
ISNULL(UP.PreferredLocale, @DefaultCulture) AS NotificationCulture
FROM
BugNet_IssueNotifications
INNER JOIN Users U ON BugNet_IssueNotifications.UserId = U.UserId
INNER JOIN Memberships M ON BugNet_IssueNotifications.UserId = M.UserId
LEFT OUTER JOIN BugNet_UserProfiles UP ON U.UserName = UP.UserName
WHERE
IssueId = @IssueId
AND M.Email IS NOT NULL
ORDER BY
DisplayName
-- get all people on the project who want to be notified
INSERT @tmpTable
SELECT
ProjectNotificationId,
IssueId = @IssueId,
u.UserId NotificationUserId,
u.UserName NotificationUserName,
IsNull(DisplayName,'') NotificationDisplayName,
m.Email NotificationEmail,
ISNULL(UP.PreferredLocale, @DefaultCulture) AS NotificationCulture
FROM
BugNet_ProjectNotifications p,
BugNet_Issues i,
Users u,
Memberships m ,
BugNet_UserProfiles up
WHERE
IssueId = @IssueId
AND p.ProjectId = i.ProjectId
AND u.UserId = p.UserId
AND u.UserId = m.UserId
AND u.UserName = up.UserName
AND m.Email IS NOT NULL
SELECT DISTINCT IssueId,NotificationUserId, NotificationUserName, NotificationDisplayName, NotificationEmail, NotificationCulture FROM @tmpTable ORDER BY NotificationDisplayName
// Source file: rust/day-2/src/lib.rs (Rust), from the nathankleyn/advent-of-code-2017 repository (MIT license)
#[allow(dead_code)]
fn day_2_part_1(input: &str) -> i64 {
input.lines().map(|row| {
let state = row.split_whitespace().fold(ChecksumState::zero(), |acc, c| {
let incoming: i64 = c.parse().unwrap();
acc.update(incoming)
});
state.largest.unwrap_or(0) - state.smallest.unwrap_or(0)
}).sum()
}
#[allow(dead_code)]
fn day_2_part_2(input: &str) -> i64 {
input.lines().map(|row| {
let columns: Vec<i64> = row.split_whitespace().map(|c| c.parse().unwrap()).collect();
let mut result: i64 = 0;
'outer: for (i, x) in columns.iter().enumerate() {
for (j, y) in columns.iter().enumerate() {
if i == j {
continue;
}
let xm = std::cmp::max(x, y);
let ym = std::cmp::min(x, y);
if xm % ym == 0 {
result = xm / ym;
break 'outer;
}
}
}
result
}).sum()
}
struct ChecksumState {
largest: Option<i64>,
smallest: Option<i64>
}
impl ChecksumState {
fn zero() -> ChecksumState {
ChecksumState {
largest: None,
smallest: None
}
}
fn update(&self, incoming: i64) -> ChecksumState {
let largest = match self.largest {
None => incoming,
Some(curr) => {
if incoming > curr {
incoming
} else {
curr
}
}
};
let smallest = match self.smallest {
None => incoming,
Some(curr) => {
if incoming < curr {
incoming
} else {
curr
}
}
};
ChecksumState {
largest: Some(largest),
smallest: Some(smallest)
}
}
}
#[cfg(test)]
mod tests {
use day_2_part_1;
use day_2_part_2;
#[test]
fn day_2_part_1_examples() {
assert_eq!(day_2_part_1("5 1 9 5\n7 5 3\n2 4 6 8"), 18);
}
#[test]
fn day_2_part_2_examples() {
assert_eq!(day_2_part_2("5 9 2 8\n9 4 7 3\n3 8 6 5"), 9);
}
const INPUT: &'static str = include_str!("input");
#[test]
fn day_2_part_1_test_input() {
assert_eq!(day_2_part_1(INPUT), 45158);
}
#[test]
fn day_2_part_2_test_input() {
assert_eq!(day_2_part_2(INPUT), 294);
}
}
| 22.872727 | 93 | 0.459062 | 3.34375 |
f04859b27ee91e595f5a5127a619b6f6d8f15b47 | 5,391 | py | Python | extract_embeddings.py | Artem531/opencv-face-recognition-with-YOLOv3 | 53a93711a079ea3739cab068aeaf5c684f6e53c4 | [
"MIT"
] | null | null | null | extract_embeddings.py | Artem531/opencv-face-recognition-with-YOLOv3 | 53a93711a079ea3739cab068aeaf5c684f6e53c4 | [
"MIT"
] | null | null | null | extract_embeddings.py | Artem531/opencv-face-recognition-with-YOLOv3 | 53a93711a079ea3739cab068aeaf5c684f6e53c4 | [
"MIT"
] | null | null | null | # USAGE
# python extract_embeddings.py --dataset dataset --embeddings output/embeddings.pickle \
# --detector face_detection_model --embedding-model openface_nn4.small2.v1.t7
# import the necessary packages
from imutils.face_utils import FaceAligner
from imutils import paths
import numpy as np
import argparse
import imutils
import pickle
import cv2
import os
import dlib
from PIL import Image
from yolo import YOLO, detect_video
from yolo3.utils import letterbox_image
from keras import backend as K
def detect_image(self, image):
if self.model_image_size != (None, None):
assert self.model_image_size[0]%32 == 0, 'Multiples of 32 required'
assert self.model_image_size[1]%32 == 0, 'Multiples of 32 required'
boxed_image = letterbox_image(image, tuple(reversed(self.model_image_size)))
else:
new_image_size = (image.width - (image.width % 32),
image.height - (image.height % 32))
boxed_image = letterbox_image(image, new_image_size)
image_data = np.array(boxed_image, dtype='float32')
#print(image_data.shape)
image_data /= 255.
image_data = np.expand_dims(image_data, 0) # Add batch dimension.
out_boxes, out_scores, out_classes = self.sess.run(
[self.boxes, self.scores, self.classes],
feed_dict={
self.yolo_model.input: image_data,
self.input_image_shape: [image.size[1], image.size[0]],
K.learning_phase(): 0
})
print('Found {} boxes for {}'.format(len(out_boxes), 'img'))
return out_boxes, out_scores, out_classes
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--dataset", required=True,
help="path to input directory of faces + images")
ap.add_argument("-e", "--embeddings", required=True,
help="path to output serialized db of facial embeddings")
ap.add_argument("-m", "--embedding-model", required=True,
help="path to OpenCV's deep learning face embedding model")
ap.add_argument("-p", "--shape-predictor", required=True,
help="path to facial landmark predictor")
args = vars(ap.parse_args())
# load our serialized face detector from disk
print("[INFO] loading face detector...")
predictor = dlib.shape_predictor(args["shape_predictor"])
#detector = dlib.get_frontal_face_detector()
detector = YOLO()
# load our serialized face embedding model from disk
print("[INFO] loading face recognizer...")
embedder = cv2.dnn.readNetFromTorch(args["embedding_model"])
# grab the paths to the input images in our dataset
print("[INFO] quantifying faces...")
imagePaths = list(paths.list_images(args["dataset"]))
# initialize our lists of extracted facial embeddings and
# corresponding people names
knownEmbeddings = []
knownNames = []
# initialize the total number of faces processed
total = 0
# loop over the image paths
for (i, imagePath) in enumerate(imagePaths):
# extract the person name from the image path
print("[INFO] processing image {}/{}".format(i + 1,
len(imagePaths)))
name = imagePath.split(os.path.sep)[-2]
# load the image, resize it to have a width of 800 pixels (while
# maintaining the aspect ratio), and then grab the image
# dimensions
image = cv2.imread(imagePath)
image = imutils.resize(image, width=800)
	# try to raise resolution
#gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
#blurred = cv2.GaussianBlur(gray, (5, 5), 0)
#image = blurred
#clahe = cv2.createCLAHE(clipLimit=4.0, tileGridSize=(8,8))
#image = clahe.apply(image)
#image = cv2.cvtColor(image, cv2.COLOR_GRAY2RGB)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
(h, w) = image.shape[:2]
	# detect faces with YOLO; every detected box below is aligned and embedded
#align_faces
fa = FaceAligner(predictor, desiredFaceWidth=256)
#gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
#rects = detector(gray, 2)
rects = []
out_boxes, out_scores, out_classes = detect_image(detector, Image.fromarray(image))
for i, c in reversed(list(enumerate(out_classes))):
(x, y, x1, y1) = out_boxes[i]
w = abs(x - x1)
h = abs(y - y1)
startX = int(min(x1, x))
endX = startX + w
startY = int(min(y1, y))
endY = startY + h
left, right, bottom, top = startX, endX, endY, startY
rect = dlib.rectangle(int(top), int(left), int(bottom) , int(right))
rects.append(rect)
for rect in rects:
faceAligned = fa.align(image, gray, rect)
print(faceAligned)
cv2.imshow("Aligned", np.asarray(faceAligned))
cv2.waitKey(0)
face = faceAligned
(fH, fW) = face.shape[:2]
# ensure the face width and height are sufficiently large
if fW < 20 or fH < 20:
continue
# construct a blob for the face ROI, then pass the blob
# through our face embedding model to obtain the 128-d
# quantification of the face
faceBlob = cv2.dnn.blobFromImage(face, 1.0 / 255,
(96, 96), (0, 0, 0), swapRB=True, crop=False)
embedder.setInput(faceBlob)
vec = embedder.forward()
# add the name of the person + corresponding face
# embedding to their respective lists
knownNames.append(name)
knownEmbeddings.append(vec.flatten())
total += 1
# dump the facial embeddings + names to disk
print("[INFO] serializing {} encodings...".format(total))
data = {"embeddings": knownEmbeddings, "names": knownNames}
f = open(args["embeddings"], "wb")
f.write(pickle.dumps(data))
f.close()
// Source file: drpcsignal/signal.go (Go), from the unistack-org/drpc repository (MIT license)
// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information.
package drpcsignal
import (
"sync"
"sync/atomic"
)
type signalStatus = uint32
const (
statusErrorSet = 0b10
statusChannelCreated = 0b01
)
// Signal contains an error value that can be set once and exports
// a number of ways to inspect it.
type Signal struct {
status signalStatus
mu sync.Mutex
ch chan struct{}
err error
}
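// A minimal usage sketch (illustration only, shown unqualified as if inside this
// package; the example error value assumes the standard "errors" package). The
// zero value is ready to use because the channel is created lazily:
//
//	var s Signal
//	go func() { s.Set(errors.New("done")) }()
//	s.Wait()                    // returns once Set has been called
//	if err, ok := s.Get(); ok {
//		_ = err // the first (and only) error stored in the signal
//	}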
// Wait blocks until the signal has been Set.
func (s *Signal) Wait() {
<-s.Signal()
}
// Signal returns a channel that will be closed when the signal is set.
func (s *Signal) Signal() chan struct{} {
if atomic.LoadUint32(&s.status)&statusChannelCreated != 0 {
return s.ch
}
return s.signalSlow()
}
// signalSlow is the slow path for Signal, so that the fast path is inlined into
// callers.
func (s *Signal) signalSlow() chan struct{} {
s.mu.Lock()
if set := s.status; set&statusChannelCreated == 0 {
s.ch = make(chan struct{})
atomic.StoreUint32(&s.status, set|statusChannelCreated)
}
s.mu.Unlock()
return s.ch
}
// Set stores the error in the signal. It only keeps track of the first
// error set, and returns true if it was the first error set.
func (s *Signal) Set(err error) (ok bool) {
if atomic.LoadUint32(&s.status)&statusErrorSet != 0 {
return false
}
return s.setSlow(err)
}
// setSlow is the slow path for Set, so that the fast path is inlined into
// callers.
func (s *Signal) setSlow(err error) (ok bool) {
s.mu.Lock()
if status := s.status; status&statusErrorSet == 0 {
ok = true
s.err = err
if status&statusChannelCreated == 0 {
s.ch = closed
}
// we have to store the flags after we set the channel but before we
// close it, otherwise there are races where a caller can hit the
// atomic fast path and observe invalid values.
atomic.StoreUint32(&s.status, statusErrorSet|statusChannelCreated)
if status&statusChannelCreated != 0 {
close(s.ch)
}
}
s.mu.Unlock()
return ok
}
// Get returns the error set with the signal and a boolean indicating if
// the result is valid.
func (s *Signal) Get() (error, bool) { //nolint
if atomic.LoadUint32(&s.status)&statusErrorSet != 0 {
return s.err, true
}
return nil, false
}
// IsSet returns true if the Signal is set.
func (s *Signal) IsSet() bool {
return atomic.LoadUint32(&s.status)&statusErrorSet != 0
}
// Err returns the error stored in the signal. Since one can store a nil error
// care must be taken. A non-nil error returned from this method means that
// the Signal has been set, but the inverse is not true.
func (s *Signal) Err() error {
if atomic.LoadUint32(&s.status)&statusErrorSet != 0 {
return s.err
}
return nil
}
| 24.724771 | 80 | 0.696104 | 3.046875 |
538de2e23f77840afb56e0666eb83b2a87142fb9 | 9,246 | swift | Swift | Select_Assignment1_Sorts.playground/Sources/XCTestAsserts.swift | huydangquoc/select_w1_assignment | 721c2c5d7b598a15aab1cdcfbfa608cc6b0e9ea8 | [
"Apache-2.0"
] | null | null | null | Select_Assignment1_Sorts.playground/Sources/XCTestAsserts.swift | huydangquoc/select_w1_assignment | 721c2c5d7b598a15aab1cdcfbfa608cc6b0e9ea8 | [
"Apache-2.0"
] | 1 | 2016-06-30T17:16:10.000Z | 2016-07-04T05:25:23.000Z | Select_Assignment1_Sorts.playground/Sources/XCTestAsserts.swift | huydangquoc/select_w1_assignment | 721c2c5d7b598a15aab1cdcfbfa608cc6b0e9ea8 | [
"Apache-2.0"
] | null | null | null | // Playground Tests
import Foundation
let defaultMessage = ""
/// Emits a test failure if the general `Boolean` expression passed
/// to it evaluates to `false`.
///
/// - Requires: This and all other XCTAssert* functions must be called from
/// within a test method, as passed to `XCTMain`.
/// Assertion failures that occur outside of a test method will *not* be
/// reported as failures.
///
/// - Parameter expression: A boolean test. If it evaluates to `false`, the
/// assertion fails and emits a test failure.
/// - Parameter message: An optional message to use in the failure if the
/// assertion fails. If no message is supplied a default message is used.
///
/// For example
///
/// ```swift
/// class TestCase: XCTestCase {
/// func testAssertions() {
/// XCTAssertEqual(1, 2)
/// XCTAssertEqual([1, 2], [2, 3])
/// XCTAssertGreaterThanOrEqual(1, 2)
/// XCTAssertTrue(true)
/// }
/// }
/// ```
///
public func XCTAssert(
@autoclosure expression: () -> BooleanType,
_ message: String = defaultMessage
) -> String {
return returnTestResult(expression(), message: message)
}
public func XCTAssertEqual<T : Equatable>(
@autoclosure expression1: () -> T?,
@autoclosure _ expression2: () -> T?,
_ message: String = defaultMessage
) -> String {
return returnTestResult(
expression1() == expression2(),
message: "\(message) - expected: \(expression2()), actual: \(expression1())")
}
public func XCTAssertEqual<T : Equatable>(
@autoclosure expression1: () -> ArraySlice<T>,
@autoclosure _ expression2: () -> ArraySlice<T>,
_ message: String = defaultMessage
) -> String {
return returnTestResult(
expression1() == expression2(),
message: "\(message) - expected: \(expression2()), actual: \(expression1())")
}
public func XCTAssertEqual<T : Equatable>(
@autoclosure expression1: () -> ContiguousArray<T>,
@autoclosure _ expression2: () -> ContiguousArray<T>,
_ message: String = defaultMessage
) -> String {
return returnTestResult(
expression1() == expression2(),
message: "\(message) - expected: \(expression2()), actual: \(expression1())")
}
public func XCTAssertEqual<T : Equatable>(
@autoclosure expression1: () -> [T],
@autoclosure _ expression2: () -> [T],
_ message: String = defaultMessage
) -> String {
return returnTestResult(
expression1() == expression2(),
message: "\(message) - expected: \(expression2()), actual: \(expression1())")
}
public func XCTAssertEqual<T, U : Equatable>(
@autoclosure expression1: () -> [T : U],
@autoclosure _ expression2: () -> [T : U],
_ message: String = defaultMessage
) -> String {
return returnTestResult(
expression1() == expression2(),
message: "\(message) - expected: \(expression2()), actual: \(expression1())")
}
public func XCTAssertFalse(
@autoclosure expression: () -> BooleanType,
_ message: String = defaultMessage
) -> String {
return returnTestResult(!expression().boolValue, message: message)
}
public func XCTAssertGreaterThan<T : Comparable>(
@autoclosure expression1: () -> T,
@autoclosure _ expression2: () -> T,
_ message: String = defaultMessage
) -> String {
return returnTestResult(
expression1() > expression2(),
message: "\(message) - actual: \(expression1()) > \(expression2())")
}
public func XCTAssertGreaterThanOrEqual<T : Comparable>(
@autoclosure expression1: () -> T,
@autoclosure _ expression2: () -> T,
_ message: String = defaultMessage
) -> String {
return returnTestResult(
expression1() >= expression2(),
message: "\(message) - actual: \(expression1()) >= \(expression2())")
}
public func XCTAssertLessThan<T : Comparable>(
@autoclosure expression1: () -> T,
@autoclosure _ expression2: () -> T,
_ message: String = defaultMessage
) -> String {
return returnTestResult(
expression1() < expression2(),
message: "\(message) - actual: \(expression1()) < \(expression2())")
}
public func XCTAssertLessThanOrEqual<T : Comparable>(
@autoclosure expression1: () -> T,
@autoclosure _ expression2: () -> T,
_ message: String = defaultMessage
) -> String {
return returnTestResult(
expression1() <= expression2(),
message: "\(message) - actual: \(expression1()) <= \(expression2())")
}
public func XCTAssertNil(
@autoclosure expression: () -> Any?,
_ message: String = ""
) -> String {
var result = true
if let _ = expression() { result = false }
return returnTestResult(
result,
message: "\(message) - expected: nil, actual: \(expression())")
}
public func XCTAssertNotEqual<T : Equatable>(
@autoclosure expression1: () -> T?,
@autoclosure _ expression2: () -> T?,
_ message: String = defaultMessage
) -> String {
return returnTestResult(
expression1() != expression2(),
message: "\(message) - expected: \(expression1()) =! \(expression2())")
}
public func XCTAssertNotEqual<T : Equatable>(
@autoclosure expression1: () -> ContiguousArray<T>,
@autoclosure _ expression2: () -> ContiguousArray<T>,
_ message: String = defaultMessage
) -> String {
return returnTestResult(
expression1() != expression2(),
message: "\(message) - expected: \(expression1()) != \(expression2())")
}
public func XCTAssertNotEqual<T : Equatable>(
@autoclosure expression1: () -> ArraySlice<T>,
@autoclosure _ expression2: () -> ArraySlice<T>,
_ message: String = defaultMessage
) -> String {
return returnTestResult(
expression1() != expression2(),
message: "\(message) - expected: \(expression1()) != \(expression2())")
}
public func XCTAssertNotEqual<T : Equatable>(
@autoclosure expression1: () -> [T],
@autoclosure _ expression2: () -> [T],
_ message: String = defaultMessage
) -> String {
return returnTestResult(
expression1() != expression2(),
message: "\(message) - expected: \(expression1()) != \(expression2())")
}
public func XCTAssertNotEqual<T, U : Equatable>(
@autoclosure expression1: () -> [T : U],
@autoclosure _ expression2: () -> [T : U],
_ message: String = defaultMessage
) -> String {
return returnTestResult(
expression1() != expression2(),
message: "\(message) - expected: \(expression1()) != \(expression2())")
}
public func XCTAssertNotNil(
@autoclosure expression: () -> Any?,
_ message: String = ""
) -> String {
var result = false
if let _ = expression() { result = true }
return returnTestResult(result, message: message)
}
public func XCTAssertTrue(
@autoclosure expression: () -> BooleanType,
_ message: String = defaultMessage
) -> String {
return returnTestResult(expression(), message: message)
}
public func XCTFail(message: String = "") -> String {
return failMessage(message)
}
func returnTestResult(result: BooleanType, message: String) -> String {
return result.boolValue ? okMessage() : failMessage(message)
}
func okMessage() -> String { return "✅" }
func failMessage(message: String) -> String { return "❌" + message }
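// Note: unlike real XCTest, these playground asserts do not stop execution; each
// call simply returns a result string, e.g. "✅" on success or
// "❌ - expected: Optional(5), actual: Optional(4)" on failure.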
// This class was based on GitHub gist:
// https://gist.github.com/croath/a9358dac0530d91e9e2b
public class XCTestCase: NSObject {
public override init(){
super.init()
self.runTestMethods()
}
public class func setUp() {}
public func setUp() {}
public class func tearDown() {}
public func tearDown() {}
override public var description: String { return "" }
private func runTestMethods(){
self.dynamicType.setUp()
var mc: CUnsignedInt = 0
var mlist: UnsafeMutablePointer<Method> =
class_copyMethodList(self.dynamicType.classForCoder(), &mc);
(0 ..< mc).forEach { _ in
let m = method_getName(mlist.memory)
if String(m).hasPrefix("test") {
self.setUp()
let startTime = NSDate()
self.performSelectorOnMainThread(
m,
withObject: nil,
waitUntilDone: true)
let endTime = NSDate()
let runTime = endTime.timeIntervalSinceDate(startTime)
print("Run time: \(runTime)")
self.tearDown()
}
mlist = mlist.successor()
}
self.dynamicType.tearDown()
}
}
| 34.5 | 85 | 0.574519 | 3 |
35cb0b95eb8ab7de23bd8cecd4b371610c69f84e | 7,687 | lua | Lua | system_drive/libs/window.lua | poeticAndroid/homegirl_pro | 224f7060423ed3d615baf0116029c9cf8eeb6c3d | [
"MIT"
] | 7 | 2020-05-09T06:12:53.000Z | 2021-05-29T02:50:45.000Z | system_drive/libs/window.lua | poeticAndroid/homegirl_pro | 224f7060423ed3d615baf0116029c9cf8eeb6c3d | [
"MIT"
] | 1 | 2020-08-16T13:44:11.000Z | 2020-08-18T09:35:59.000Z | system_drive/libs/window.lua | poeticAndroid/homegirl_pro | 224f7060423ed3d615baf0116029c9cf8eeb6c3d | [
"MIT"
] | null | null | null | local Widget = require("widget")
local Window = Widget:extend()
do
function Window:constructor(title, left, top, width, height, parent)
self.children = {}
self:attachto(nil, parent, parent)
self:size(width, height)
self:position(left, top)
self:title(title)
end
function Window:attachto(...)
Widget.attachto(self, ...)
self.mainvp = view.new(self.container)
self._closebtn = view.new(self.container)
self._titlevp = view.new(self.container)
self._hidebtn = view.new(self.container)
self._resbtn = view.new(self.container, 8, 8, 8, 8)
self:title(self:title())
view.active(self.mainvp)
self:redraw()
if view.attribute(self.parentvp, "hide-enabled") == "true" then
self.onhide = function(self)
view.visible(self.container, false)
end
end
end
function Window:redraw()
if not self.container then
return
end
local prevvp = view.active()
view.active(self.container)
local focused = view.focused(self.container)
local mx, my, mbtn = input.mouse()
local sw, sh = view.size(self.container)
local tw, th = text.draw(self._title, self.font, 0, 0)
local btnw, btnh = math.ceil((th + 2) * 1.5), th + 2
view.position(self.mainvp, 3, btnh)
view.size(self.mainvp, sw - 6, sh - btnh - 3)
gfx.bgcolor(focused and self.fgcolor or self.bgcolor)
gfx.cls()
self:outset(0, 0, sw, sh)
self:inset(2, 2, sw - 4, sh - 4)
view.active(self._closebtn)
view.size(self._closebtn, btnw, btnh)
gfx.bgcolor(focused and self.fgcolor or self.bgcolor)
gfx.cls()
gfx.fgcolor(self.darkcolor)
gfx.bar(4, 3, btnw - 8, btnh - 6)
gfx.fgcolor(focused and self.lightcolor or self.bgcolor)
gfx.bar(5, 4, btnw - 10, btnh - 8)
if view.attribute(self._closebtn, "pressed") == "true" then
self:inset(0, 0, btnw, btnh)
else
self:outset(0, 0, btnw, btnh)
end
view.visible(self._closebtn, self.onclose and true or false)
view.active(self._titlevp)
local btns = 0
if self.onclose then
btns = btns + 1
end
view.position(self._titlevp, btnw * btns, 0)
if self.onhide then
btns = btns + 1
end
local w, h = view.size(self._titlevp, sw - btnw * btns, btnh)
gfx.bgcolor(focused and self.fgcolor or self.bgcolor)
gfx.cls()
gfx.fgcolor(focused and self.fgtextcolor or self.bgtextcolor)
text.draw(self._title, self.font, math.min(w - tw, w / 2 - tw / 2), 1)
self:outset(0, 0, w, h)
if not self.onclose then
gfx.pixel(0, h - 1, gfx.pixel(0, h - 2))
gfx.pixel(1, h - 1, gfx.pixel(1, h - 2))
end
if not self.onhide then
gfx.pixel(w - 2, h - 1, gfx.pixel(w - 2, h - 2))
end
view.active(self._hidebtn)
view.position(self._hidebtn, sw - btnw, 0)
view.size(self._hidebtn, btnw, btnh)
gfx.bgcolor(focused and self.fgcolor or self.bgcolor)
gfx.cls()
gfx.fgcolor(self.darkcolor)
gfx.bar(3, 2, btnw - 6, btnh - 4)
gfx.fgcolor(focused and self.fgcolor or self.bgcolor)
gfx.bar(4, 3, btnw - 8, btnh - 6)
gfx.fgcolor(self.darkcolor)
gfx.bar(3, 2, btnw / 2 - 2, btnh / 2 - 1)
gfx.fgcolor(focused and self.lightcolor or self.bgcolor)
gfx.bar(4, 3, btnw / 2 - 4, btnh / 2 - 3)
if view.attribute(self._hidebtn, "pressed") == "true" then
self:inset(0, 0, btnw, btnh)
else
self:outset(0, 0, btnw, btnh)
end
view.visible(self._hidebtn, self.onhide and true or false)
view.active(self._resbtn)
local w, h = view.size(self._resbtn)
view.position(self._resbtn, sw - w, sh - h)
gfx.bgcolor(focused and self.fgcolor or self.bgcolor)
gfx.cls()
self:outset(0, 0, w + 1, h + 1)
self:outset(-1, -1, w + 1, h + 1)
gfx.pixel(0, h - 2, gfx.pixel(1, h - 2))
gfx.pixel(w - 2, 0, gfx.pixel(w - 2, 1))
view.visible(self._resbtn, self.resizable and true or false)
view.active(prevvp)
end
function Window:title(title)
if title then
if self.container then
self._title = view.attribute(self.container, "title", title)
end
self:redraw()
end
return self._title
end
function Window:icon(icon)
if icon then
if self.container then
self._icon = view.attribute(self.container, "icon", icon)
end
end
return self._icon
end
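  -- position() clamps the window to its parent viewport: once dragged more than
  -- `thres` pixels past an edge the window becomes "snapped" and may hang partly
  -- off-screen (the title bar stays reachable); otherwise it is kept fully inside.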
function Window:position(l, t)
if not t then
l, t = view.position(self.container)
end
local sw, sh = view.size(self.parentvp)
local ww, wh = view.size(self.container)
local bw, bh = view.size(self._closebtn)
local minl = -ww + bw * 2
local maxl = sw - bw * 2
local mint = 0
local maxt = sh - bh
local thres = 16
if l >= 0 and t >= 0 and l <= sw - ww and t <= sh - wh then
self._snapped = false
end
if l < -thres or t < -thres or l > sw - ww + thres or t > sh - wh + thres then
self._snapped = true
end
if not self._snapped then
minl, mint = 0, 0
maxl, maxt = sw - ww, sh - wh
end
if wh > sh then
maxl = sw - ww
maxt = sh - wh
end
if l < minl then
l = minl
end
if t < mint then
t = mint
end
if l > maxl then
l = maxl
end
if t > maxt then
t = maxt
end
return Widget.position(self, l, t)
end
function Window:size(w, h)
if not h then
w, h = view.size(self.container)
end
local wl, wt = view.position(self.container)
local sw, sh = view.size(self.parentvp)
local bw, bh = view.size(self._closebtn)
local minw, minh = bw * 2, bh * 2
local maxw, maxh = 640, 480
if not self._snapped then
maxw, maxh = sw - wl, sh - wt
end
if w < minw then
w = minw
end
if h < minh then
h = minh
end
if w > maxw then
w = maxw
end
if h > maxh then
h = maxh
end
return Widget.size(self, w, h)
end
function Window:step(time)
local prevvp = view.active()
view.active(self.container)
local vw, vh = view.size(self.container)
local mx, my, mbtn, _x, _y = input.mouse()
if self._lastmbtn == 0 and mbtn == 1 then
view.zindex(self.container, -1)
end
if self.onclose and self:gotclicked(self._closebtn) then
self:redraw()
view.active(prevvp)
return self:onclose()
end
if self.onhide and self:gotclicked(self._hidebtn) then
self:redraw()
view.active(prevvp)
return self:onhide()
end
view.active(self._titlevp)
_x, _y, mbtn = input.mouse()
if mbtn == 1 and not self._moving then
self._moving = true
self._movingx = mx
self._movingy = my
end
view.active(self._resbtn)
_x, _y, mbtn = input.mouse()
if mbtn == 1 then
self._resizing = true
vw, vh = view.size(self._resbtn)
self:size(mx + vw / 2, my + vh / 2)
self:redraw()
else
self._resizing = false
self:position()
end
view.active(self.container)
_x, _y, mbtn = input.mouse()
if mbtn == 1 then
if self._moving then
local left, top = self:position()
self:position(left + mx - self._movingx, top + my - self._movingy)
end
else
self._moving = false
if view.focused(self.container) then
view.focused(self.mainvp, true)
end
end
if self._focused ~= view.focused(self.container) or mbtn ~= 0 then
self._focused = view.focused(self.container)
self._lastmbtn = mbtn
self:redraw()
end
if not self._moving and not self._resizing then
for name, child in pairs(self.children) do
child:step(time)
end
end
view.active(prevvp)
end
end
return Window
| 28.261029 | 82 | 0.605958 | 3.09375 |
dd66aeadc83afe26033cefc1913ea0c459cc15c2 | 4,345 | swift | Swift | OktoIDE/Pods/Prestyler/Prestyler/Classes/Extensions.swift | MediBoss/OktoIDE | e34fa137b5f093c32b7f2ad7a016f5ecacf4cf36 | [
"Apache-2.0"
] | null | null | null | OktoIDE/Pods/Prestyler/Prestyler/Classes/Extensions.swift | MediBoss/OktoIDE | e34fa137b5f093c32b7f2ad7a016f5ecacf4cf36 | [
"Apache-2.0"
] | 2 | 2019-07-08T23:43:09.000Z | 2019-07-17T16:43:50.000Z | OktoIDE/Pods/Prestyler/Prestyler/Classes/Extensions.swift | mediassumani/OktoIDE | e34fa137b5f093c32b7f2ad7a016f5ecacf4cf36 | [
"Apache-2.0"
] | null | null | null | //
// StringExtension.swift
// Pods-Prestyler_Example
//
// Created by Ilya Krupko on 28/02/2019.
//
import Foundation
/// Prestyler uses a public extension to provide access to string formatting. The following methods can be used on every string.
public extension String {
    /// Convert string to attributed string by looking for embedded tags and finding suitable patterns.
///
/// - Returns: Attributed string
func prestyled() -> NSAttributedString {
var textToStyle = self
let appliedRules = Prestyler.findTextRules(&textToStyle)
var resultedText = NSMutableAttributedString(string: textToStyle)
appliedRules.forEach { $0.applyTo(text: &resultedText) }
return resultedText
}
/// Convert string to attributed string by using provided rule pattern.
///
/// - Parameter rule: pattern rule to apply
/// - Returns: attributed string
func prestyledBy(rule: String) -> NSAttributedString {
return (rule + self + rule).prestyled()
}
/// Convert string to attributed string by using provided styles.
///
/// - Parameter styles: styles to apply
/// - Returns: attributed string
func prestyledBy(styles: Any...) -> NSAttributedString {
let rule = TextRule(styles: styles, positions: [0, self.count])
var resultedText = NSMutableAttributedString(string: self)
rule.applyTo(text: &resultedText)
return resultedText
}
}
extension String {
func index(at position: Int, from start: Index? = nil) -> Index? {
let startingIndex = start ?? startIndex
return index(startingIndex, offsetBy: position, limitedBy: endIndex)
}
func character(at position: Int) -> Character? {
guard position >= 0, let indexPosition = index(at: position) else {
return nil
}
return self[indexPosition]
}
func hexToUIColor() -> UIColor? {
var string = self.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
string = string.replacingOccurrences(of: "#", with: "")
if string.count == 3 {
let r = string.character(at: 0) ?? "0"
let g = string.character(at: 1) ?? "0"
let b = string.character(at: 2) ?? "0"
string = "\(r)\(r)\(g)\(g)\(b)\(b)"
}
if string.count != 6 {
return nil
}
var rgbValue: UInt32 = 0
Scanner(string: string).scanHexInt32(&rgbValue)
return UIColor(
red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
alpha: CGFloat(1.0)
)
}
}
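// Note: hexToUIColor() accepts 3- or 6-digit hex strings, with or without a
// leading "#", e.g. "#1A2B3C".hexToUIColor() or "F00".hexToUIColor().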
extension StringProtocol where Index == String.Index {
func indexes(of string: Self, options: String.CompareOptions = []) -> [Int] {
var result: [Int] = []
var start = startIndex
while start < endIndex,
let range = self[start..<endIndex].range(of: string, options: options) {
result.append(range.lowerBound.encodedOffset)
start = range.lowerBound < range.upperBound ? range.upperBound :
index(range.lowerBound, offsetBy: 1, limitedBy: endIndex) ?? endIndex
}
return result
}
}
extension UIColor {
func mixin(infusion: UIColor, alpha: CGFloat) -> UIColor {
let alpha2 = min(1.0, max(0, alpha))
let beta = 1.0 - alpha2
var r1:CGFloat = 0, r2:CGFloat = 0
var g1:CGFloat = 0, g2:CGFloat = 0
var b1:CGFloat = 0, b2:CGFloat = 0
var a1:CGFloat = 0, a2:CGFloat = 0
if getRed(&r1, green: &g1, blue: &b1, alpha: &a1) &&
infusion.getRed(&r2, green: &g2, blue: &b2, alpha: &a2) {
let red = r1 * beta + r2 * alpha2;
let green = g1 * beta + g2 * alpha2;
let blue = b1 * beta + b2 * alpha2;
let alpha = a1 * beta + a2 * alpha2;
return UIColor(red: red, green: green, blue: blue, alpha: alpha)
}
return self
}
}
extension NSRange {
func splitUnitary() -> [NSRange] {
var result = [NSRange]()
for index in 0..<self.length {
result.append(NSRange(location: self.location + index, length: 1))
}
return result
}
}
| 35.040323 | 120 | 0.588953 | 3.15625 |
5ae222da8ce3145238c55a2bbf4a71a4f81e8c8a | 5,657 | rs | Rust | layout21raw/src/bbox.rs | colepoirier/Layout21 | 227c0a317619176e9b4ca87aa23cbd99526f260c | [
"BSD-3-Clause"
] | null | null | null | layout21raw/src/bbox.rs | colepoirier/Layout21 | 227c0a317619176e9b4ca87aa23cbd99526f260c | [
"BSD-3-Clause"
] | null | null | null | layout21raw/src/bbox.rs | colepoirier/Layout21 | 227c0a317619176e9b4ca87aa23cbd99526f260c | [
"BSD-3-Clause"
] | null | null | null | //!
//! # Rectangular Bounding Boxes and Associated Trait
//!
// Crates.io
use serde::{Deserialize, Serialize};
// Local imports
use crate::{
geom::{Point, Shape},
Int, Rect,
};
/// # Axis-Aligned Rectangular Bounding Box
///
/// Points `p0` and `p1` represent opposite corners of a bounding rectangle.
/// `p0` is always closest to negative-infinity, in both x and y,
/// and `p1` is always closest to positive-infinity.
///
#[derive(Debug, Default, Copy, Clone, Deserialize, Serialize, PartialEq, Eq)]
pub struct BoundBox {
pub p0: Point,
pub p1: Point,
}
impl BoundBox {
/// Create a new [BoundBox] from two [Point]s.
/// Callers are responsible for ensuring that p0.x <= p1.x, and p0.y <= p1.y.
fn new(p0: Point, p1: Point) -> Self {
Self { p0, p1 }
}
/// Create a new [BoundBox] from a single [Point].
/// The resultant [BoundBox] comprises solely the point, having zero area.
pub fn from_point(pt: &Point) -> Self {
Self {
p0: pt.clone(),
p1: pt.clone(),
}
}
/// Create a new [BoundBox] from two points
pub fn from_points(p0: &Point, p1: &Point) -> Self {
Self {
p0: Point::new(p0.x.min(p1.x), p0.y.min(p1.y)),
p1: Point::new(p0.x.max(p1.x), p0.y.max(p1.y)),
}
}
/// Create an empty, otherwise invalid [BoundBox]
pub fn empty() -> Self {
Self {
p0: Point::new(Int::MAX, Int::MAX),
p1: Point::new(Int::MIN, Int::MIN),
}
}
/// Boolean indication of whether a box is empty
pub fn is_empty(&self) -> bool {
self.p0.x > self.p1.x || self.p0.y > self.p1.y
}
/// Boolean indication of whether [Point] `pt` lies inside out box.
pub fn contains(&self, pt: &Point) -> bool {
self.p0.x <= pt.x && self.p1.x >= pt.x && self.p0.y <= pt.y && self.p1.y >= pt.y
}
/// Expand an existing [BoundBox] in all directions by `delta`
pub fn expand(&mut self, delta: Int) {
self.p0.x -= delta;
self.p0.y -= delta;
self.p1.x += delta;
self.p1.y += delta;
}
/// Get the box's size as an (x,y) tuple
pub fn size(&self) -> (Int, Int) {
(self.p1.x - self.p0.x, self.p1.y - self.p0.y)
}
/// Get the box's center
pub fn center(&self) -> Point {
Point::new((self.p0.x + self.p1.x) / 2, (self.p0.y + self.p1.y) / 2)
}
}
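// Example (sketch): two disjoint boxes have an empty intersection, while their
// union spans both:
//
//     let a = BoundBox::from_points(&Point::new(0, 0), &Point::new(2, 2));
//     let b = BoundBox::from_points(&Point::new(5, 5), &Point::new(7, 9));
//     assert!(a.intersection(&b).is_empty());
//     assert_eq!(a.union(&b).size(), (7, 9));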
///
/// # Bounding Box Trait
///
/// Methods for interacting with [BoundBox]s.
/// Implementations for [Point]s, [Shape]s, and [BoundBox]s
/// enable geometric transformations such as union and intersection.
///
pub trait BoundBoxTrait {
/// Compute a rectangular bounding box around the implementing type.
fn bbox(&self) -> BoundBox;
/// Compute the intersection with rectangular bounding box `bbox`.
/// Creates and returns a new [BoundBox].
/// Default implementation is to return the intersection of `self.bbox()` and `bbox`.
fn intersection(&self, bbox: &BoundBox) -> BoundBox {
self.bbox().intersection(&bbox)
}
/// Compute the union with rectangular bounding box `bbox`.
/// Creates and returns a new [BoundBox].
/// Default implementation is to return the union of `self.bbox()` and `bbox`.
fn union(&self, bbox: &BoundBox) -> BoundBox {
self.bbox().union(&bbox)
}
}
impl BoundBoxTrait for BoundBox {
fn bbox(&self) -> BoundBox {
// We're great as we are, as a [BoundBox] already.
// Create a clone to adhere to our "new bbox" return-type.
self.clone()
}
fn intersection(&self, bbox: &BoundBox) -> BoundBox {
let pmin = Point::new(self.p0.x.max(bbox.p0.x), self.p0.y.max(bbox.p0.y));
let pmax = Point::new(self.p1.x.min(bbox.p1.x), self.p1.y.min(bbox.p1.y));
// Check for empty intersection, and return an empty box if so
if pmin.x > pmax.x || pmin.y > pmax.y {
return BoundBox::empty();
}
// Otherwise return the intersection
BoundBox::new(pmin, pmax)
}
fn union(&self, bbox: &BoundBox) -> BoundBox {
// Take the minimum and maximum of the two bounding boxes
BoundBox::new(
Point::new(self.p0.x.min(bbox.p0.x), self.p0.y.min(bbox.p0.y)),
Point::new(self.p1.x.max(bbox.p1.x), self.p1.y.max(bbox.p1.y)),
)
}
}
impl BoundBoxTrait for Point {
fn bbox(&self) -> BoundBox {
BoundBox::from_point(self)
}
fn intersection(&self, bbox: &BoundBox) -> BoundBox {
if !bbox.contains(self) {
return BoundBox::empty();
}
bbox.intersection(&BoundBox::from_point(self))
}
fn union(&self, bbox: &BoundBox) -> BoundBox {
BoundBox::new(
Point::new(self.x.min(bbox.p0.x), self.y.min(bbox.p0.y)),
Point::new(self.x.max(bbox.p1.x), self.y.max(bbox.p1.y)),
)
}
}
impl BoundBoxTrait for Shape {
fn bbox(&self) -> BoundBox {
// Dispatch based on shape-type, either two-Point or multi-Point form.
match self {
Shape::Rect(ref r) => BoundBox::from_points(&r.p0, &r.p1),
Shape::Polygon(ref p) => (&p.points).bbox(),
Shape::Path(ref p) => (&p.points).bbox(),
}
}
}
impl BoundBoxTrait for Rect {
fn bbox(&self) -> BoundBox {
BoundBox::from_points(&self.p0, &self.p1)
}
}
impl BoundBoxTrait for Vec<Point> {
fn bbox(&self) -> BoundBox {
// Take the union of all points in the vector
let mut bbox = BoundBox::empty();
for pt in self {
bbox = bbox.union(&pt.bbox());
}
bbox
}
}
| 33.473373 | 89 | 0.574156 | 3.53125 |
f0430557dbfd094f1b6f8f4b8c1e8e82129f2452 | 6,822 | js | JavaScript | dist/models/state-models.js | a9udn9u/last-step | 7e7e95ed52f8f8af9cb0621ac618859f95843cc1 | [
"MIT"
] | 2 | 2017-03-28T00:16:40.000Z | 2018-11-18T06:24:54.000Z | dist/models/state-models.js | a9udn9u/last-step | 7e7e95ed52f8f8af9cb0621ac618859f95843cc1 | [
"MIT"
] | null | null | null | dist/models/state-models.js | a9udn9u/last-step | 7e7e95ed52f8f8af9cb0621ac618859f95843cc1 | [
"MIT"
] | null | null | null | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const utils_1 = require("~/utils");
/**
* Simple 1-to-M mapping
*/
class OneToMany extends Map {
constructor() {
super();
}
}
/**
* Target to sources mapping
*/
class TargetToSources extends OneToMany {
/**
* Key is the target file, value is a set of sources that contribute
* to the target
*/
constructor() {
super();
}
/**
* For each target, trace sources back to their original sources
* @param {Object} oldTts Old targetToSourcesMap
*/
trace(oldTTS) {
this.forEach((values, key) => {
let newValues = Array.from(values)
.map(v => oldTTS.get(v))
.filter(vals => !!vals)
.reduce((all, vals) => utils_1.Utils.union(all, vals), new Set());
this.set(key, newValues);
});
}
/**
* Create a new map with flipped key/values
*/
flip() {
let flipped = new SourceToTargets();
this.forEach((values, key) => {
values.forEach(val => {
let rev = flipped.get(val) || new Set();
rev.add(key);
flipped.set(val, rev);
});
});
return flipped;
}
}
exports.TargetToSources = TargetToSources;
class SourceToTargets extends OneToMany {
}
exports.SourceToTargets = SourceToTargets;
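// Example: flip() turns a TargetToSources map { "bundle.js" => Set{"a.ts", "b.ts"} }
// into the SourceToTargets map { "a.ts" => Set{"bundle.js"}, "b.ts" => Set{"bundle.js"} }.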
class Context {
constructor(packed = {}) {
this.rootDir = packed.rootDir;
this.sourceDir = packed.sourceDir;
this.workDir = packed.workDir;
this.index = packed.index;
this.input = packed.input;
this.output = packed.output;
this.targetToSources = packed.targetToSources;
this.sourceToTargets = packed.sourceToTargets;
}
}
exports.Context = Context;
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic3RhdGUtbW9kZWxzLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vc3JjL21vZGVscy9zdGF0ZS1tb2RlbHMudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6Ijs7QUFBQSxtQ0FBZ0M7QUFJaEM7O0dBRUc7QUFDSCxlQUFtQixTQUFRLEdBQWM7SUFDdkM7UUFDRSxLQUFLLEVBQUUsQ0FBQztJQUNWLENBQUM7Q0FDRjtBQUVEOztHQUVHO0FBQ0gscUJBQTZCLFNBQVEsU0FBaUI7SUFDcEQ7OztPQUdHO0lBQ0g7UUFDRSxLQUFLLEVBQUUsQ0FBQztJQUNWLENBQUM7SUFFRDs7O09BR0c7SUFDSCxLQUFLLENBQUMsTUFBdUI7UUFDM0IsSUFBSSxDQUFDLE9BQU8sQ0FBQyxDQUFDLE1BQU0sRUFBRSxHQUFHLEVBQUUsRUFBRTtZQUMzQixJQUFJLFNBQVMsR0FBRyxLQUFLLENBQUMsSUFBSSxDQUFDLE1BQU0sQ0FBQztpQkFDN0IsR0FBRyxDQUFDLENBQUMsQ0FBQyxFQUFFLENBQUMsTUFBTSxDQUFDLEdBQUcsQ0FBQyxDQUFDLENBQUMsQ0FBQztpQkFDdkIsTUFBTSxDQUFDLElBQUksQ0FBQyxFQUFFLENBQUMsQ0FBQyxDQUFDLElBQUksQ0FBQztpQkFDdEIsTUFBTSxDQUFDLENBQUMsR0FBRyxFQUFFLElBQUksRUFBRSxFQUFFLENBQUMsYUFBSyxDQUFDLEtBQUssQ0FBQyxHQUFHLEVBQUUsSUFBSSxDQUFDLEVBQUUsSUFBSSxHQUFHLEVBQUUsQ0FBQyxDQUFDO1lBQzlELElBQUksQ0FBQyxHQUFHLENBQUMsR0FBRyxFQUFFLFNBQVMsQ0FBQyxDQUFDO1FBQzNCLENBQUMsQ0FBQyxDQUFDO0lBQ0wsQ0FBQztJQUVEOztPQUVHO0lBQ0gsSUFBSTtRQUNGLElBQUksT0FBTyxHQUFvQixJQUFJLGVBQWUsRUFBRSxDQUFDO1FBQ3JELElBQUksQ0FBQyxPQUFPLENBQUMsQ0FBQyxNQUFNLEVBQUUsR0FBRyxFQUFFLEVBQUU7WUFDM0IsTUFBTSxDQUFDLE9BQU8sQ0FBQyxHQUFHLENBQUMsRUFBRTtnQkFDbkIsSUFBSSxHQUFHLEdBQUcsT0FBTyxDQUFDLEdBQUcsQ0FBQyxHQUFHLENBQUMsSUFBSSxJQUFJLEdBQUcsRUFBVSxDQUFDO2dCQUNoRCxHQUFHLENBQUMsR0FBRyxDQUFDLEdBQUcsQ0FBQyxDQUFDO2dCQUNiLE9BQU8sQ0FBQyxHQUFHLENBQUMsR0FBRyxFQUFFLEdBQUcsQ0FBQyxDQUFDO1lBQ3hCLENBQUMsQ0FBQyxDQUFDO1FBQ0wsQ0FBQyxDQUFDLENBQUM7UUFDSCxNQUFNLENBQUMsT0FBTyxDQUFDO0lBQ2pCLENBQUM7Q0FDRjtBQXJDRCwwQ0FxQ0M7QUFFRCxxQkFBNkIsU0FBUSxTQUFpQjtDQUNyRDtBQURELDBDQUNDO0FBRUQ7SUFVRSxZQUFZLFNBQWMsRUFBRTtRQUMxQixJQUFJLENBQUMsT0FBTyxHQUFHLE1BQU0sQ0FBQyxPQUFPLENBQUM7UUFDOUIsSUFBSSxDQUFDLFNBQVMsR0FBRyxNQUFNLENBQUMsU0FBUyxDQUFDO1FBQ2xDLElBQUksQ0FBQyxPQUFPLEdBQUcsTUFBTSxDQUFDLE9BQU8sQ0FBQztRQUM5QixJQUFJLENBQUMsS0FBSyxHQUFHLE1BQU0sQ0FBQyxLQUFLLENBQUM7UUFDMUIsSUFBSSxDQUFDLEtBQUssR0FBRyxNQUFNLENBQUMsS0FBSyxDQUFDO1FBQzFCLElBQUksQ0FBQyxNQUFNLEdBQUcsTUFBTSxDQUFDLE1BQU0sQ0FBQztRQUM1QixJQUFJLENBQUMsZUFBZSxHQUFHLE1BQU0sQ0FBQyxlQUFlLENBQUM7UUFDOUMsSUFBSSxDQUFDLGVBQWUsR0FBRyxNQUFNLENBQUMsZUFBZSxDQUFDO0lBQ2hELENBQUM7Q0FDRjtBQXBCRCwwQkFvQkMiLCJzb3VyY2VzQ29udGVudCI6WyJpbXBvcnQgeyBVdGlscyB9IGZyb20gJ34vdXRpbHMnO1xuaW1wb3J0IHsgUHJvY2Vzc29ySW5wdXQsIFByb2Nlc3Nvck91dHB1dCB9IGZyb20gJ34vbW9kZWxzL3Byb2Nlc3Nvci1tb2RlbHMnO1xuaW1wb3J0IHsgUHJvY2Vzc29yIH0gZnJvbSAnfi9wcm9jZXNzb3JzL3Byb2Nlc3Nvcic7XG5cbi8qKlxuICogU2ltcGxlIDEtdG8tTSBtYXBwaW5nXG4gKi9cbmNsYXNzIE9uZVRvTWFueTxUPiBleHRlbmRzIE1hcDxULCBTZXQ8VD4+IHtcbiAgY29uc3RydWN0b3IoKSB7XG4gICAgc3VwZXIoKTtcbiAgfVxufVxuXG4vKipcbiAqIFRhcmdldCB0byBzb3VyY2VzIG1hcHBpbmdcbiAqL1xuZXhwb3J0IGNsYXNzIFRhcmdldFRvU291cmNlcyBleHRlbmRzIE9uZVRvTWFueTxzdHJpbmc+IHtcbiAgLyoqXG4gICAqIEtleSBpcyB0aGUgdGFyZ2V0IGZpbGUsIHZhbHVlIGlzIGEgc2V0IG9mIHNvdXJjZXMgdGhhdCBjb250cmlidXRlXG4gICAqIHRvIHRoZSB0YXJnZXRcbiAgICovXG4gIGNvbnN0cnVjdG9yKCkge1xuICAgIHN1cGVyKCk7XG4gIH1cblxuICAvKipcbiAgICogRm9yIGVhY2ggdGFyZ2V0LCB0cmFjZSBzb3VyY2VzIGJhY2sgdG8gdGhlaXIgb3JpZ2luYWwgc291cmNlc1xuICAgKiBAcGFyYW0ge09iamVjdH0gb2xkVHRzIE9sZCB0YXJnZXRUb1NvdXJjZXNNYXBcbiAgICovXG4gIHRyYWNlKG9sZFRUUzogVGFyZ2V0VG9Tb3VyY2VzKTogdm9pZCB7XG4gICAgdGhpcy5mb3JFYWNoKCh2YWx1ZXMsIGtleSkgPT4ge1xuICAgICAgbGV0IG5ld1ZhbHVlcyA9IEFycmF5LmZyb20odmFsdWVzKVxuICAgICAgICAgIC5tYXAodiA9PiBvbGRUVFMuZ2V0KHYpKVxuICAgICAgICAgIC5maWx0ZXIodmFscyA9PiAhIXZhbHMpXG4gICAgICAgICAgLnJlZHVjZSgoYWxsLCB2YWxzKSA9PiBVdGlscy51bmlvbihhbGwsIHZhbHMpLCBuZXcgU2V0KCkpO1xuICAgICAgdGhpcy5zZXQoa2V5LCBuZXdWY
Wx1ZXMpO1xuICAgIH0pO1xuICB9XG5cbiAgLyoqXG4gICAqIENyZWF0ZSBhIG5ldyBtYXAgd2l0aCBmbGlwcGVkIGtleS92YWx1ZXNcbiAgICovXG4gIGZsaXAoKTogU291cmNlVG9UYXJnZXRzIHtcbiAgICBsZXQgZmxpcHBlZDogU291cmNlVG9UYXJnZXRzID0gbmV3IFNvdXJjZVRvVGFyZ2V0cygpO1xuICAgIHRoaXMuZm9yRWFjaCgodmFsdWVzLCBrZXkpID0+IHtcbiAgICAgIHZhbHVlcy5mb3JFYWNoKHZhbCA9PiB7XG4gICAgICAgIGxldCByZXYgPSBmbGlwcGVkLmdldCh2YWwpIHx8IG5ldyBTZXQ8c3RyaW5nPigpO1xuICAgICAgICByZXYuYWRkKGtleSk7XG4gICAgICAgIGZsaXBwZWQuc2V0KHZhbCwgcmV2KTtcbiAgICAgIH0pO1xuICAgIH0pO1xuICAgIHJldHVybiBmbGlwcGVkO1xuICB9XG59XG5cbmV4cG9ydCBjbGFzcyBTb3VyY2VUb1RhcmdldHMgZXh0ZW5kcyBPbmVUb01hbnk8c3RyaW5nPiB7XG59XG5cbmV4cG9ydCBjbGFzcyBDb250ZXh0IHtcbiAgcm9vdERpcjogc3RyaW5nO1xuICBzb3VyY2VEaXI6IHN0cmluZztcbiAgd29ya0Rpcjogc3RyaW5nO1xuICBpbmRleDogbnVtYmVyO1xuICBpbnB1dDogUHJvY2Vzc29ySW5wdXQ7XG4gIG91dHB1dDogUHJvY2Vzc29yT3V0cHV0O1xuICB0YXJnZXRUb1NvdXJjZXM6IFRhcmdldFRvU291cmNlcztcbiAgc291cmNlVG9UYXJnZXRzOiBTb3VyY2VUb1RhcmdldHM7XG5cbiAgY29uc3RydWN0b3IocGFja2VkOiBhbnkgPSB7fSkge1xuICAgIHRoaXMucm9vdERpciA9IHBhY2tlZC5yb290RGlyO1xuICAgIHRoaXMuc291cmNlRGlyID0gcGFja2VkLnNvdXJjZURpcjtcbiAgICB0aGlzLndvcmtEaXIgPSBwYWNrZWQud29ya0RpcjtcbiAgICB0aGlzLmluZGV4ID0gcGFja2VkLmluZGV4O1xuICAgIHRoaXMuaW5wdXQgPSBwYWNrZWQuaW5wdXQ7XG4gICAgdGhpcy5vdXRwdXQgPSBwYWNrZWQub3V0cHV0O1xuICAgIHRoaXMudGFyZ2V0VG9Tb3VyY2VzID0gcGFja2VkLnRhcmdldFRvU291cmNlcztcbiAgICB0aGlzLnNvdXJjZVRvVGFyZ2V0cyA9IHBhY2tlZC5zb3VyY2VUb1RhcmdldHM7XG4gIH1cbn0iXX0= | 100.323529 | 4,970 | 0.880827 | 3.09375 |
4c00a61ce18e0e6bf38c4294d512c9e1642981fe | 1,121 | kt | Kotlin | Boats_to_Save_People_v1.kt | xiekc/leetcode | 56dc65850ac68752f7c6f50f8b35eb23bfafed93 | [
"MIT"
] | null | null | null | Boats_to_Save_People_v1.kt | xiekc/leetcode | 56dc65850ac68752f7c6f50f8b35eb23bfafed93 | [
"MIT"
] | null | null | null | Boats_to_Save_People_v1.kt | xiekc/leetcode | 56dc65850ac68752f7c6f50f8b35eb23bfafed93 | [
"MIT"
] | null | null | null | class Solution {
fun numRescueBoats(people: IntArray, limit: Int): Int {
people.sort()
var left = 0
var right = people.size - 1
var result = 0
while (right >= left) {
var sum = 0
var count = 0
while (right >= left && sum + people[right] <= limit && count < 2) {
sum += people[right]
right--
count++
}
while (right >= left && sum + people[left] <= limit && count < 2) {
sum += people[left]
left++
count++
}
result++
}
return result
}
}
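// Greedy two-pointer strategy: sort by weight, then fill each boat (capacity 2)
// starting from the heaviest remaining person and topping up with the lightest
// people that still fit under the limit.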
class TestCase(val people: IntArray, val limit: Int)
fun main() {
val solution = Solution()
val testCases = arrayOf(TestCase(intArrayOf(1, 2), 3), TestCase(intArrayOf(1, 1, 1, 2, 2, 2), 3),
TestCase(intArrayOf(3, 2, 2, 1), 3), TestCase(intArrayOf(3, 5, 3, 4), 5),
TestCase(intArrayOf(3, 2, 3, 2, 2), 6))
for (case in testCases) {
println(solution.numRescueBoats(case.people, case.limit))
}
}
| 30.297297 | 101 | 0.482605 | 3.296875 |
9c0464d3febf40d0d8a4cd5697b9e27ca24b15b7 | 2,912 | js | JavaScript | src/domains/components/SlaveZone.js | mighteejim/manager | 86365b7142a78956e04821c0cb9e0adadaf0de3c | [
"BSD-3-Clause"
] | null | null | null | src/domains/components/SlaveZone.js | mighteejim/manager | 86365b7142a78956e04821c0cb9e0adadaf0de3c | [
"BSD-3-Clause"
] | null | null | null | src/domains/components/SlaveZone.js | mighteejim/manager | 86365b7142a78956e04821c0cb9e0adadaf0de3c | [
"BSD-3-Clause"
] | null | null | null | import React, { Component, PropTypes } from 'react';
import { connect } from 'react-redux';
import { Link } from 'react-router';
import { replace } from 'react-router-redux';
import { Card, CardHeader } from 'linode-components/cards';
import { Table } from 'linode-components/tables';
import { ButtonCell } from 'linode-components/tables/cells';
import { showModal, hideModal } from '~/actions/modal';
import EditSOARecord from '../components/EditSOARecord';
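// Read-only view of a slave (secondary) DNS zone: renders the SOA summary table
// (primary domain, transfer IPs, master IPs) and opens the EditSOARecord modal on Edit.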
export class SlaveZone extends Component {
renderSOAEditRecord() {
const { dispatch, domain } = this.props;
dispatch(showModal(
'Edit SOA Record',
<EditSOARecord
dispatch={dispatch}
domains={domain}
close={(newDomain) => () => {
dispatch(hideModal());
dispatch(replace(`/domains/${newDomain || domain.domain}`));
}}
/>
));
}
renderEditRecord(title, component, props = {}) {
const { dispatch, domain } = this.props;
dispatch(showModal(
title,
React.createElement(component, {
...props,
dispatch,
zone: domain,
close: () => dispatch(hideModal()),
}),
));
}
renderEditSOARecord(title) {
return this.renderEditRecord(title, EditSOARecord);
}
render() {
const { domain } = this.props;
if (!domain) {
return null;
}
const { axfr_ips, master_ips } = domain;
const soaRecord = {
...domain,
axfr_ips: axfr_ips.map(ip => <div key={ip}>{ip}</div>),
master_ips: master_ips.map(ip => <div key={ip}>{ip}</div>),
};
return (
<div>
<header className="main-header main-header--border">
<div className="container">
<Link to="/domains">Domains</Link>
<h1 title={domain.id}>
{domain.group ? `${domain.group} / ` : ''}
{domain.domain}
</h1>
</div>
</header>
<div className="container">
<Card
id="soa"
header={
<CardHeader title="SOA Record" />
}
>
<Table
className="Table--secondary"
columns={[
{ dataKey: 'domain', label: 'Primary Domain' },
{ dataKey: 'axfr_ips', label: 'Domain Transfers' },
{ dataKey: 'master_ips', label: 'Masters' },
{
cellComponent: ButtonCell,
headerClassName: 'ButtonColumn',
text: 'Edit',
onClick: () => {
this.renderSOAEditRecord();
},
},
]}
data={[soaRecord]}
/>
</Card>
</div>
</div>
);
}
}
SlaveZone.propTypes = {
dispatch: PropTypes.func.isRequired,
domain: PropTypes.object.isRequired,
};
export default connect()(SlaveZone);
| 26.472727 | 70 | 0.522665 | 3.171875 |
653d695cd3021eadcb097d2dd9fa97ca942ec702 | 10,454 | py | Python | etc/check-python.py | maxzheng/auto-update | 7d9afa139f890ff9a6bbeb01549a311bdb5168d0 | [
"MIT"
] | 7 | 2018-08-22T21:03:54.000Z | 2022-02-04T20:31:20.000Z | etc/check-python.py | maxzheng/auto-update | 7d9afa139f890ff9a6bbeb01549a311bdb5168d0 | [
"MIT"
] | null | null | null | etc/check-python.py | maxzheng/auto-update | 7d9afa139f890ff9a6bbeb01549a311bdb5168d0 | [
"MIT"
] | 2 | 2019-04-24T20:49:01.000Z | 2019-10-30T17:45:19.000Z | #!/usr/bin/env python
import argparse
import os
import platform
import re
import shutil
import subprocess
import sys
SUPPORTED_VERSIONS = ('3.6', '3.7')
IS_DEBIAN = platform.system() == 'Linux' and os.path.exists('/etc/debian_version')
IS_OLD_UBUNTU = (IS_DEBIAN and os.path.exists('/etc/lsb-release')
and re.search('RELEASE=1[46]', open('/etc/lsb-release').read()))
IS_MACOS = platform.system() == 'Darwin'
SUDO = 'sudo ' if os.getuid() else ''
parser = argparse.ArgumentParser(description='Check and fix Python installation')
parser.add_argument('--autofix', action='store_true', help='Automatically fix any problems found')
parser.add_argument('--version', default=SUPPORTED_VERSIONS[0], choices=SUPPORTED_VERSIONS,
help='Python version to check')
args = parser.parse_args()
PY_VERSION = args.version
AUTOFIX = args.autofix
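# Usage (sketch): the script is normally piped to an interpreter, e.g.
#   curl -s <url-to-this-script> | python - --version 3.6 --autofix
# but it can also be run directly: python check-python.py --autofix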
def check_sudo():
if not run('which sudo', return_output=True):
error('! sudo is not installed.')
print(' Please ask an administrator to install it and run this again.')
sys.exit(1)
def check_apt():
os.environ['DEBIAN_FRONTEND'] = 'noninteractive'
run(SUDO + 'apt-get install -y apt-utils', return_output=True)
def check_curl():
if not run('which curl', return_output=True):
error('! curl is not installed.')
if IS_DEBIAN:
raise AutoFixSuggestion('To install, run', SUDO + 'apt-get install -y curl')
sys.exit(1)
def check_python():
py3_path = run('which python' + PY_VERSION, return_output=True)
if not py3_path:
error('! Python ' + PY_VERSION + ' is not installed.')
if '--version' not in sys.argv:
print(' autopip supports Python {}.'.format(', '.join(SUPPORTED_VERSIONS))
+ ' To check a different version, re-run using "python - --version x.y"')
if IS_OLD_UBUNTU:
raise AutoFixSuggestion('To install, run',
(SUDO + 'apt-get update',
SUDO + 'apt-get install -y software-properties-common',
SUDO + 'add-apt-repository -y ppa:deadsnakes/ppa',
SUDO + 'apt-get update',
SUDO + 'apt-get install -y python' + PY_VERSION))
elif IS_DEBIAN:
raise AutoFixSuggestion('To install, run',
(SUDO + 'apt-get update', SUDO + 'apt-get install -y python' + PY_VERSION))
elif IS_MACOS:
raise AutoFixSuggestion('To install, run', 'brew install python')
print(' Please install Python ' + PY_VERSION
+ ' per http://docs.python-guide.org/en/latest/starting/installation/')
sys.exit(1)
def check_pip():
if not run('which pip3', return_output=True):
error('! pip3 is not installed.')
if IS_DEBIAN:
raise AutoFixSuggestion('To install, run', SUDO + 'apt-get install -y python3-pip')
elif IS_MACOS:
raise AutoFixSuggestion('To install, run', 'curl -s https://bootstrap.pypa.io/get-pip.py | '
+ SUDO + 'python' + PY_VERSION)
print(' If your package repo has a *-pip package for Python ' + PY_VERSION
+ ', then installing it from there is recommended.')
print(' To install directly, run: curl -s https://bootstrap.pypa.io/get-pip.py | '
+ SUDO + 'python' + PY_VERSION)
sys.exit(1)
version_full = run('pip3 --version', return_output=True)
if 'python ' + PY_VERSION not in version_full:
print(' ' + version_full.strip())
error('! pip3 is pointing to another Python version and not Python ' + PY_VERSION)
if '--version' not in sys.argv:
print(' autopip supports Python {}.'.format(', '.join(SUPPORTED_VERSIONS))
+ ' To check a different version, re-run using "python - --version x.y"')
raise AutoFixSuggestion('To re-install for Python ' + PY_VERSION + ', run',
'curl -s https://bootstrap.pypa.io/get-pip.py | ' + SUDO + 'python' + PY_VERSION)
version_str = version_full.split()[1]
version = tuple(map(_int_or, version_str.split('.', 2)))
if version < (9, 0, 3):
error('! Version is', version_str + ', but should be 9.0.3+')
raise AutoFixSuggestion('To upgrade, run', SUDO + 'pip3 install pip==9.0.3')
def check_venv():
test_venv_path = '/tmp/check-python-venv-{}'.format(os.getpid())
try:
try:
run('python' + PY_VERSION + ' -m venv ' + test_venv_path, stderr=subprocess.STDOUT, return_output=True,
raises=True)
except Exception:
error('! Could not create virtual environment.')
if IS_DEBIAN:
raise AutoFixSuggestion('To install, run', SUDO + 'apt-get install -y python' + PY_VERSION + '-venv')
print(' Please make sure Python venv package is installed.')
sys.exit(1)
finally:
shutil.rmtree(test_venv_path, ignore_errors=True)
try:
try:
run('virtualenv --python python' + PY_VERSION + ' ' + test_venv_path, stderr=subprocess.STDOUT,
return_output=True,
raises=True)
except Exception as e:
if run('which virtualenv', return_output=True):
error('! Could not create virtual environment.')
print(' ' + str(e))
sys.exit(1)
else:
error('! virtualenv is not installed.')
raise AutoFixSuggestion('To install, run', SUDO + 'pip3 install virtualenv')
finally:
shutil.rmtree(test_venv_path, ignore_errors=True)
def check_setuptools():
try:
version_str = run('python' + PY_VERSION + ' -m easy_install --version', return_output=True, raises=True)
except Exception:
error('! setuptools is not installed.')
raise AutoFixSuggestion('To install, run', SUDO + 'pip3 install setuptools')
version_str = version_str.split()[1]
version = tuple(map(_int_or, version_str.split('.')))
if version < (39,):
error('! Version is', version_str + ', but should be 39+')
raise AutoFixSuggestion('To upgrade, run', SUDO + 'pip3 install -U setuptools')
def check_wheel():
try:
version_str = run('python' + PY_VERSION + ' -m wheel version ', return_output=True, raises=True)
except Exception:
error('! wheel is not installed.')
raise AutoFixSuggestion('To install, run', SUDO + 'pip3 install wheel')
version_str = version_str.split()[1]
version = tuple(map(_int_or, version_str.split('.')))
if version < (0, 31):
error('! Version is', version_str + ', but should be 0.31+')
raise AutoFixSuggestion('To upgrade, run', SUDO + 'pip3 install -U wheel')
def check_python_dev():
include_path = run('python' + PY_VERSION
+ ' -c "from distutils.sysconfig import get_python_inc; print(get_python_inc())"',
return_output=True)
if not include_path:
error('! Failed to get Python include path, so not sure if Python dev package is installed')
if IS_DEBIAN:
raise AutoFixSuggestion('To install, run', SUDO + ' apt-get install -y python' + PY_VERSION + '-dev')
sys.exit(1)
python_h = os.path.join(include_path.strip(), 'Python.h')
if not os.path.exists(python_h):
error('! Python dev package is not installed as', python_h, 'does not exist')
if IS_DEBIAN:
raise AutoFixSuggestion('To install, run', SUDO + 'apt-get install -y python' + PY_VERSION + '-dev')
sys.exit(1)
def run(cmd, return_output=False, raises=False, **kwargs):
print('+ ' + str(cmd))
if '"' in cmd or '|' in cmd:
kwargs['shell'] = True
elif isinstance(cmd, str):
cmd = cmd.split()
check_call = subprocess.check_output if return_output else subprocess.check_call
try:
output = check_call(cmd, **kwargs)
if isinstance(output, bytes):
output = output.decode('utf-8')
return output
except Exception:
if return_output and not raises:
return
else:
raise
def _int_or(value):
try:
return int(value)
except Exception:
return value
def error(*msg):
msg = ' '.join(map(str, msg))
echo(msg, color=None if AUTOFIX else 'red')
def echo(msg, color=None):
if sys.stdout.isatty() and color:
if color == 'red':
color = '\033[0;31m'
elif color == 'green':
color = '\033[92m'
msg = color + msg + '\033[0m'
print(msg)
class AutoFixSuggestion(Exception):
def __init__(self, instruction, cmd):
super(AutoFixSuggestion, self).__init__(instruction)
self.cmd = cmd
checks = [check_python, check_pip, check_venv, check_setuptools, check_wheel, check_python_dev]
if AUTOFIX:
checks.insert(0, check_curl)
if IS_DEBIAN:
checks.insert(0, check_apt)
if SUDO:
checks.insert(0, check_sudo)
try:
last_fix = None
for check in checks:
print('Checking ' + check.__name__.split('_', 1)[1].replace('_', ' '))
while True:
try:
check()
break
except AutoFixSuggestion as e:
cmds = e.cmd if isinstance(e.cmd, tuple) else (e.cmd,)
if AUTOFIX:
if cmds == last_fix:
error('! Failed to fix automatically, so you gotta fix it yourself.')
sys.exit(1)
else:
for cmd in cmds:
run(cmd, return_output=True, raises=True)
last_fix = cmds
else:
print(' ' + str(e) + ': ' + ' && '.join(cmds) + '\n')
print('# Run the above suggested command(s) manually and then re-run to continue checking,')
print(' or re-run using "python - --autofix" to run all suggested commands automatically.')
sys.exit(1)
print('')
except Exception as e:
error('!', str(e))
sys.exit(1)
except KeyboardInterrupt:
sys.exit(1)
echo('Python is alive and well. Good job!', color='green')
| 34.730897 | 117 | 0.583604 | 3.140625 |
7932ee22245535cb2f3a6aaa3cca03f8b87a4bd8 | 6,704 | kt | Kotlin | src/test/kotlin/com/github/vertical_blank/sqlformatter/N1qlFormatterTest.kt | Willam2004/sql-formatter | bb9d26533e417378730aed91fc3e75f0994d1fb5 | [
"MIT"
] | 106 | 2019-06-04T13:57:09.000Z | 2022-03-17T12:44:45.000Z | src/test/kotlin/com/github/vertical_blank/sqlformatter/N1qlFormatterTest.kt | Willam2004/sql-formatter | bb9d26533e417378730aed91fc3e75f0994d1fb5 | [
"MIT"
] | 42 | 2019-04-04T16:42:27.000Z | 2022-03-01T14:13:35.000Z | src/test/kotlin/com/github/vertical_blank/sqlformatter/N1qlFormatterTest.kt | Willam2004/sql-formatter | bb9d26533e417378730aed91fc3e75f0994d1fb5 | [
"MIT"
] | 25 | 2019-10-02T07:29:29.000Z | 2022-01-21T02:46:27.000Z | package com.github.vertical_blank.sqlformatter
import com.github.vertical_blank.sqlformatter.features.supportsBetween
import com.github.vertical_blank.sqlformatter.features.supportsJoin
import com.github.vertical_blank.sqlformatter.features.supportsOperators
import com.github.vertical_blank.sqlformatter.features.supportsSchema
import com.github.vertical_blank.sqlformatter.features.supportsStrings
import com.github.vertical_blank.sqlformatter.languages.Dialect
import com.github.vertical_blank.sqlformatter.languages.StringLiteral
import org.spekframework.spek2.Spek
import org.spekframework.spek2.style.specification.describe
object N1qlFormatterTest :
Spek({
val formatter = SqlFormatter.of(Dialect.N1ql)
describe("N1qlFormatter") {
with(formatter) {
behavesLikeSqlFormatter(formatter)
supportsStrings(
formatter,
listOf(
StringLiteral.DOUBLE_QUOTE, StringLiteral.SINGLE_QUOTE, StringLiteral.BACK_QUOTE))
supportsBetween(formatter)
supportsSchema(formatter)
supportsOperators(formatter, listOf("%", "==", "!="))
supportsJoin(formatter, without = listOf("FULL", "CROSS", "NATURAL"))
it("formats SELECT query with element selection expression") {
val result = format("SELECT order_lines[0].productId FROM orders;")
expect(result)
.toBe(
"""
SELECT
order_lines[0].productId
FROM
orders;
""".trimIndent())
}
it("formats SELECT query with primary key querying") {
val result = format("SELECT fname, email FROM tutorial USE KEYS ['dave', 'ian'];")
expect(result)
.toBe(
"""
SELECT
fname,
email
FROM
tutorial
USE KEYS
['dave', 'ian'];
""".trimIndent())
}
it("formats INSERT with {} object literal") {
val result =
format("INSERT INTO heroes (KEY, VALUE) VALUES ('123', {'id':1,'type':'Tarzan'});")
expect(result)
.toBe(
"""
INSERT INTO
heroes (KEY, VALUE)
VALUES
('123', {'id': 1, 'type': 'Tarzan'});
""".trimIndent())
}
it("formats INSERT with large object and array literals") {
val result =
format(
"""
INSERT INTO heroes (KEY, VALUE) VALUES ('123', {'id': 1, 'type': 'Tarzan',
'array': [123456789, 123456789, 123456789, 123456789, 123456789], 'hello': 'world'});
""")
expect(result)
.toBe(
"""
INSERT INTO
heroes (KEY, VALUE)
VALUES
(
'123',
{
'id': 1,
'type': 'Tarzan',
'array': [
123456789,
123456789,
123456789,
123456789,
123456789
],
'hello': 'world'
}
);
""".trimIndent())
}
it("formats SELECT query with UNNEST top level reserver word") {
val result = format("SELECT * FROM tutorial UNNEST tutorial.children c;")
expect(result)
.toBe(
"""
SELECT
*
FROM
tutorial
UNNEST
tutorial.children c;
""".trimIndent())
}
it("formats SELECT query with NEST and USE KEYS") {
val result =
format(
"""
SELECT * FROM usr
USE KEYS 'Elinor_33313792' NEST orders_with_users orders
ON KEYS ARRAY s.order_id FOR s IN usr.shipped_order_history END;
""")
expect(result)
.toBe(
"""
SELECT
*
FROM
usr
USE KEYS
'Elinor_33313792'
NEST
orders_with_users orders ON KEYS ARRAY s.order_id FOR s IN usr.shipped_order_history END;
""".trimIndent())
}
it("formats explained DELETE query with USE KEYS and RETURNING") {
val result = format("EXPLAIN DELETE FROM tutorial t USE KEYS 'baldwin' RETURNING t")
expect(result)
.toBe(
"""
EXPLAIN DELETE FROM
tutorial t
USE KEYS
'baldwin' RETURNING t
""".trimIndent())
}
it("formats UPDATE query with USE KEYS and RETURNING") {
val result =
format(
"UPDATE tutorial USE KEYS 'baldwin' SET type = 'actor' RETURNING tutorial.type")
expect(result)
.toBe(
"""
UPDATE
tutorial
USE KEYS
'baldwin'
SET
type = 'actor' RETURNING tutorial.type
""".trimIndent())
}
it("recognizes \$variables") {
val result = format("SELECT \$variable, \$'var name', \$\"var name\", \$`var name`;")
expect(result)
.toBe(
"""
SELECT
${"$"}variable,
${"$"}'var name',
${"$"}"var name",
${"$"}`var name`;
""".trimIndent())
}
it("replaces \$variables with param values") {
val result =
format(
"SELECT \$variable, $'var name', \$\"var name\", \$`var name`;",
mapOf(
"variable" to """"variable value"""",
"var name" to "'var value'",
))
expect(result)
.toBe(
"""
SELECT
"variable value",
'var value',
'var value',
'var value';
""".trimIndent())
}
it("replaces $ numbered placeholders with param values") {
val result =
format(
"SELECT \$1, \$2, \$0;",
mapOf(
"0" to "first",
"1" to "second",
"2" to "third",
))
expect(result)
.toBe(
"""
SELECT
second,
third,
first;
""".trimIndent())
}
}
}
})
| 30.894009 | 100 | 0.459427 | 3.015625 |
f01114fcd31b24a944a91cf16636601c7b3cffa8 | 6,134 | py | Python | src/func.py | yygr/datascience_utility | aa6aa37508e46ab3568805dd1bb514ef10652240 | [
"MIT"
] | null | null | null | src/func.py | yygr/datascience_utility | aa6aa37508e46ab3568805dd1bb514ef10652240 | [
"MIT"
] | null | null | null | src/func.py | yygr/datascience_utility | aa6aa37508e46ab3568805dd1bb514ef10652240 | [
"MIT"
] | null | null | null | from pdb import set_trace
from time import time
import matplotlib.pyplot as plt
import numpy as np
from numpy import random
from scipy.stats import chi2
import renom as rm
class Enc(rm.Model):
def __init__(
self, pre, latent_dim,
output_act = None,
):
self.pre = pre
self.latent_dim = latent_dim
self.zm_ = rm.Dense(latent_dim)
self.zlv_ = rm.Dense(latent_dim)
self.output_act = output_act
def forward(self, x):
hidden = self.pre(x)
self.zm = self.zm_(hidden)
self.zlv = self.zlv_(hidden)
if self.output_act:
self.zm = self.output_act(self.zm)
self.zlv = self.output_act(self.zlv)
return self.zm
class VAE(rm.Model):
def __init__(
self,
enc,
dec,
latent_dim,
batch_size = None,
sigma = 1.
):
self.latent_dim = latent_dim
self.enc = enc
self.dec = dec
self.batch_size = batch_size
self.sigma = sigma
def forward(self, x, eps=1e-3):
nb = len(x)
self.enc(x)
e = np.random.randn(nb, self.latent_dim)*self.sigma
self.z = self.enc.zm + rm.exp(self.enc.zlv/2)*e
self.decd = self.dec(self.z)
self.reconE = rm.mean_squared_error(self.decd, x)
self.kl_loss = - 0.5 * rm.sum(
1 + self.enc.zlv - self.enc.zm**2 -rm.exp(self.enc.zlv)
)/nb
self.vae_loss = self.kl_loss + self.reconE
return self.decd
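# Note: forward() returns the reconstruction; the scalar losses stay on the
# instance, where vae_loss = reconE (mean squared reconstruction error) +
# kl_loss (closed-form KL of q(z|x) = N(zm, exp(zlv)) from a unit Gaussian prior),
# and is presumably minimized by the surrounding training loop (not shown here).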
class Mahalanobis():
def __init__(self, data, label):
self.i_max = label.max() + 1
self.labels = np.unique(label)
self.d = data.shape[-1]
#print('Computing the mean')
#s = time.time()
self.mu = np.array([
data[np.where(label==x)[0]].mean(0) for x in self.labels])
#print(' {}sec'.format(time.time() - s))
#print('Computing Cov')
#s = time.time()
self.cov = np.array([
np.cov(data[np.where(label==x)[0]].T) for x in self.labels])
#print(' {}sec'.format(time.time() - s))
#n()
print('Computing Dist')
s = time()
self.comp_dist(data, label)
print(' {}sec'.format(time() - s))
#self.set_th()
def stat(self):
        print('latent dimension = {}'.format(self.d))
print('{} classifier'.format(self.i_max))
def a(self, x, i):
temp = x-self.mu[i]
#return np.dot(np.dot(temp, np.linalg.inv(self.cov[i])), temp.T)
return np.dot(temp, np.linalg.solve(self.cov[i], temp.T))
def al(self, x):
return [self.a(x, i) for i in range(self.i_max)]
def comp_dist(self, data, label):
dist = []
if 0:
for x in self.labels:
sub = data[np.where(label==x)[0]]
dist.append(np.array([self.al(x) for x in sub]))
#dist.append(np.diagonal(np.dot(np.dot(sub,self.cov[i]),sub.T)))
else:
for x in self.labels:
sub = data[np.where(label==x)[0]]
sub_dist = []
for i, y in enumerate(self.labels):
temp = sub - self.mu[i]
sub_dist.append(np.diag(
np.dot(temp,
                            np.linalg.solve(self.cov[i], temp.T))
                    ))
                dist.append(np.array(sub_dist).T)
        self.dist = np.array(dist)
def get_dist(self, data):
res = np.zeros((len(data), self.i_max))
for i in range(self.i_max):
temp = data - self.mu[i]
res[:,i] = np.diag(
np.dot(temp,
np.linalg.solve(self.cov[i], temp.T))
)
return res
#return np.array([self.al(x) for x in data])
def gamma(self,n):
return np.prod(np.arange(1,n))
def chi_squared(self, u, k, s):
a = 2*s
b = k//2
t = u/a
v = np.power(t,b-1)*np.exp(-t)/a/self.gamma(b)
return v
def comp_th(self, th):
assert th <= 1, "{}:th must be lower than 1 or equal to 1".format(th)
dth = 1 - th
return chi2.isf(dth, self.d)
def get_ths(self, ths):
ths_ = np.sort(ths)
acc = 0
split = 1e6
maxv = 100
delta = maxv/split
athl = []
ath = 0
pre = 0
for dth in ths_:
while acc < dth:
check_value = '\r{}'.format(acc)
sys.stdout.write(check_value)
sys.stdout.flush()
acc += self.chi_squared(ath, self.d, 1) * delta
ath += delta
athl.append(ath)
print('')
return np.array(athl)
def set_th(self, th=0.001):
th = self.comp_th(th)
self.th = th
def predict(self, data, th=None):
res = self.get_dist(data)
if th is None:
return res / self.th
return res / th
def predicts(self, data, ths):
temp = self.get_dist(data)
res = []
for th in ths:
res.append(temp/th)
return np.array(res)
def predict_prob(self, data):
res = self.get_dist(data)
prob_all = []
for item in res:
subprob = []
for i, x in enumerate(item):
distance = self.cumprob[i][0]
prob = self.cumprob[i][1]
if distance[-1] < x:
subprob.append(prob[-1])
else:
subprob.append(prob[np.argmax(distance>x)-1])
prob_all.append(np.array(subprob))
return res/self.th, np.array(prob_all)
def comp_cummlative_probability(self, bins=100):
cumprob = []
for i in range(self.dist.shape[0]):
hist, x = np.histogram(np.sort(self.dist[i][:,i]), bins)
cum_hist = np.array([hist[:j].sum() for j,_ in enumerate(hist)])
cum_hist = 1 - cum_hist/cum_hist.max().astype('float')
cumprob.append((x[:-1],cum_hist))
self.cumprob = np.array(cumprob)
| 31.137056 | 80 | 0.5 | 3.046875 |
85709306e36a3164506f46bde9238c0cc13c1a5c | 3,859 | js | JavaScript | src/components/Contact.js | MarcinRapacz/project1 | 009326f0b645d295b87e5207d9313c59395660b8 | [
"MIT"
] | null | null | null | src/components/Contact.js | MarcinRapacz/project1 | 009326f0b645d295b87e5207d9313c59395660b8 | [
"MIT"
] | 4 | 2021-05-10T09:26:07.000Z | 2022-02-18T08:14:11.000Z | src/components/Contact.js | MarcinRapacz/project1 | 009326f0b645d295b87e5207d9313c59395660b8 | [
"MIT"
] | null | null | null | import React, { useState } from "react"
import { FaAtom } from "react-icons/fa"
import axios from "axios"
import validator from "validator"
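// Contact form component: validates each field on change with validator.js and,
// on submit, POSTs {name, email, message} to an external send-email microservice.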
const Contact = () => {
const defaultForm = {
name: {
value: "",
error: false,
},
email: {
value: "",
error: false,
},
message: {
value: "",
error: false,
},
}
const [data, setData] = useState(defaultForm)
const [message, setMessage] = useState(false)
const [loading, setLoading] = useState(false)
const handleChange = e => {
let isValid = false
let error = true
switch (e.target.name) {
case "name":
isValid = validator.isLength(e.target.value, { min: 2, max: 64 })
error = isValid ? false : "Enter the correct name"
break
case "email":
isValid = validator.isEmail(e.target.value)
error = isValid ? false : "Enter the correct email"
break
case "message":
isValid = validator.isLength(e.target.value, { min: 2, max: 400 })
error = isValid ? false : "Enter the correct message"
break
default:
break
}
setData({
...data,
[e.target.name]: {
value: e.target.value,
error,
},
})
}
const handleSubmit = async e => {
e.preventDefault()
const dataToSend = {}
const errors = []
setLoading(true)
for (const field in data) {
dataToSend[field] = data[field].value
if (data[field].error) errors.push(data[field].error)
if (!data[field].value) errors.push(data[field].error)
}
if (errors.length) {
setMessage("Complete all fields")
} else {
await axios
.post("https://microservice-send-email.herokuapp.com/api/sendEmail", {
data: dataToSend,
API_KEY:
"c9047f6cd74f2aa734ffca287eef938ad89552c273b38f5911e3b60857995493",
})
.then(res => {
setData(defaultForm)
setMessage("Message was sent")
})
.catch(err => {
setMessage("Something went wrong. Try again")
})
}
setTimeout(() => setMessage(false), 5000)
setLoading(false)
}
return (
<section className="contact container" id="contact">
<div className="contact__content">
<form onSubmit={handleSubmit} className="contact__form">
<input
type="text"
placeholder="Name"
name="name"
className="contact__input"
value={data.name.value}
onChange={handleChange}
/>
<p
className={`contact__error ${data.name.error &&
"contact__error--show"}`}
>
{data.name.error}
</p>
<input
type="email"
placeholder="Email adress"
name="email"
className="contact__input"
value={data.email.value}
onChange={handleChange}
/>
<p
className={`contact__error ${data.email.error &&
"contact__error--show"}`}
>
{data.email.error}
</p>
<textarea
name="message"
placeholder="Message..."
className="contact__textarea"
value={data.message.value}
onChange={handleChange}
></textarea>
<p
className={`contact__error ${data.message.error &&
"contact__error--show"}`}
>
{data.message.error}
</p>
{loading ? (
<FaAtom className="contact__icon" />
) : (
<button type="submit" className="contact__btn">
{message ? message : "Submit message"}
</button>
)}
</form>
</div>
</section>
)
}
export default Contact
| 25.726667 | 79 | 0.524747 | 3.109375 |
0eb9d5c7436f5a67ee5b0c847a9ed2b7e2f98a6e | 775 | sql | SQL | createDataBase.sql | fatemehMirabdollahi/to-do_frontEnd | a209229407d1b1e724559f85c2dd2be5faf3faa1 | [
"MIT"
] | 3 | 2021-04-22T19:09:44.000Z | 2021-08-07T19:14:15.000Z | createDataBase.sql | fatemehMirabdollahi/to-do_frontEnd | a209229407d1b1e724559f85c2dd2be5faf3faa1 | [
"MIT"
] | 1 | 2021-02-28T19:17:41.000Z | 2021-02-28T19:18:42.000Z | createDataBase.sql | fatemehMirabdollahi/to-do_frontEnd | a209229407d1b1e724559f85c2dd2be5faf3faa1 | [
"MIT"
] | null | null | null | CREATE TABLE list (
list_title VARCHAR(50) NOT NULL,
undone INTEGER DEFAULT 0,
PRIMARY KEY(list_title)
);
CREATE TABLE task (
task_id INT GENERATED ALWAYS AS IDENTITY,
task_title VARCHAR(50) NOT NULL,
important BOOLEAN DEFAULT FALSE,
myday BOOLEAN DEFAULT FALSE,
done BOOLEAN DEFAULT FALSE,
step_num INTEGER DEFAULT 0,
done_step INTEGER DEFAULT 0,
list_title VARCHAR(50) NOT NULL,
PRIMARY KEY(task_id),
CONSTRAINT fk_list
FOREIGN KEY(list_title)
REFERENCES list(list_title)
ON DELETE CASCADE
ON UPDATE CASCADE
);
CREATE TABLE step(
step_id INT GENERATED ALWAYS AS IDENTITY,
title VARCHAR(50) NOT NULL,
done BOOLEAN DEFAULT FALSE,
task_id INT NOT NULL,
PRIMARY KEY (step_id),
CONSTRAINT fk_task
FOREIGN KEY(task_id)
REFERENCES task(task_id)
ON DELETE CASCADE
);
| 25 | 43 | 0.734194 | 3 |
280efac39846b803b9ee69f6d22bf6a79e31e3b4 | 1,311 | rb | Ruby | app/controllers/admin/admin_users_controller.rb | paa-yaw/referral_system | 7d101fd3eef4020e3b2107f7cb75a174350332f1 | [
"Unlicense"
] | 1 | 2019-06-17T15:10:14.000Z | 2019-06-17T15:10:14.000Z | app/controllers/admin/admin_users_controller.rb | paa-yaw/referral_system | 7d101fd3eef4020e3b2107f7cb75a174350332f1 | [
"Unlicense"
] | 5 | 2021-05-18T22:43:47.000Z | 2022-03-30T22:50:59.000Z | app/controllers/admin/admin_users_controller.rb | paa-yaw/referral_system | 7d101fd3eef4020e3b2107f7cb75a174350332f1 | [
"Unlicense"
] | null | null | null | class Admin::AdminUsersController < Admin::ApplicationController
before_action :set_user, only: [:show, :edit, :update, :destroy, :update_button]
layout "admin"
def index
@admins = User.all.includes(:companies).where(admin: true, admin_status: 1)
end
def show
end
def new
@admin = User.new
end
def create
@admin = User.new(user_params)
if @admin.save
@admin.update(admin: true, admin_status: 1)
flash[:success] = "you have successfully created an admin"
redirect_to [:admin, :admin, @admin]
else
flash[:alert] = "oops! sthg went wrong"
render :new
end
end
def edit
end
def update
if params[:user][:password].blank?
params[:user].delete(:password)
end
if @admin.update(user_params)
@admin.update(update_button: false)
flash[:success] = "you have successfully updated #{@admin.username}'s details"
redirect_to :back
else
flash.now[:alert] = "oops! sthg went wrong"
render :edit
end
end
def destroy
@admin.destroy
redirect_to :back
end
def update_button
@admin.update(update_button: true)
redirect_to :back
end
private
def set_user
@admin = User.find(params[:id])
end
def user_params
params.require(:user).permit!
end
end
| 19 | 83 | 0.650648 | 3.109375 |
f0425b1ddda33471bcd698350aad4a8f84b9b335 | 1,837 | py | Python | mnt/us/kapps/apps/gallery/gallery.py | PhilippMundhenk/kapps | eed07669d8554393bfbd40acd8d255475e90b88e | [
"MIT"
] | 1 | 2021-11-19T08:40:44.000Z | 2021-11-19T08:40:44.000Z | mnt/us/kapps/apps/gallery/gallery.py | PhilippMundhenk/kapps | eed07669d8554393bfbd40acd8d255475e90b88e | [
"MIT"
] | null | null | null | mnt/us/kapps/apps/gallery/gallery.py | PhilippMundhenk/kapps | eed07669d8554393bfbd40acd8d255475e90b88e | [
"MIT"
] | null | null | null | from core.kapp import Kapp
from core.httpResponse import HTTPResponse
from core.Kcommand import Kcommand
import uuid
import os
class GetImage(Kcommand):
getImageHash = str(uuid.uuid4())
def __init__(self):
super(GetImage, self).__init__(
"GetImage", self.getImageHash)
class ViewImage(Kcommand):
viewImageHash = str(uuid.uuid4())
def __init__(self):
super(ViewImage, self).__init__(
"ViewImage", self.viewImageHash)
class GalleryApp(Kapp):
name = "Gallery"
def getImageCallback(self, kcommand):
with open(kcommand.getParameter("path"), 'r') as file:
return HTTPResponse(content=file.read())
def viewImageCallback(self, kcommand):
cmd = GetImage()
cmd.params = dict(kcommand.params)
return HTTPResponse(content=self.getRes("image.html").replace("$IMAGE$", "<img style=\"width:100%;\" src=" + cmd.toURL() + " />"))
def homeCallback(self, kcommand):
path = "/mnt/us/images/"
files = os.listdir(path)
paths = [os.path.join(path, basename) for basename in files]
text = ""
for p in paths:
text = text + "<tr><td>"
imageURL = ViewImage().setParameter("path", p).toURL()
text = text + "<a href=\"" + \
imageURL + "\">" + p.replace(path, "") + "</a>"
text = text + "</td></tr>"
return HTTPResponse(content=self.getRes("imageList.html").replace("$IMAGES$", text))
def iconCallback(self, kcommand):
return HTTPResponse(content=self.getRes("icon.png"))
def register(appID, appPath, ctx):
print("register " + GalleryApp.name)
app = GalleryApp(appID, appPath, ctx)
app.subscribe(GetImage(), app.getImageCallback)
app.subscribe(ViewImage(), app.viewImageCallback)
return app
| 30.114754 | 138 | 0.619488 | 3.359375 |
004e9101a653a8b4f1ea24fed6f1c40459546fad | 1,952 | kt | Kotlin | app/src/main/java/com/zdog/demo/ui/effects/sunny/Clouds.kt | DavosLi3hn9g/legal-informaticsp | a80c79cc0afeae6161eefbe69d7ab9cbb3cbd85c | [
"MIT"
] | 21 | 2019-09-27T11:21:47.000Z | 2022-03-03T20:32:27.000Z | app/src/main/java/com/zdog/demo/ui/effects/sunny/Clouds.kt | DavosLi3hn9g/legal-informaticsp | a80c79cc0afeae6161eefbe69d7ab9cbb3cbd85c | [
"MIT"
] | null | null | null | app/src/main/java/com/zdog/demo/ui/effects/sunny/Clouds.kt | DavosLi3hn9g/legal-informaticsp | a80c79cc0afeae6161eefbe69d7ab9cbb3cbd85c | [
"MIT"
] | 5 | 2019-10-15T01:56:50.000Z | 2021-04-16T14:08:16.000Z | package com.zdog.demo.ui.effects.sunny
import com.zdog.demo.ui.effects.Entity
import com.zdog.demo.ui.effects.cloud1
import com.zdog.demo.ui.effects.cloud2
import com.zdog.demo.ui.effects.layerSpace
import com.zdog.demo.ui.shapes.*
import com.zdog.demo.ui.shapes.Colors.white
import com.zdog.library.render.*
class Clouds : Entity() {
private val container = combine {
color = white.color
}
private val cloud1 = cloud1 {
addTo = container
}
private val cloud2 = cloud1.copy {
rotate(y = (-TAU * 1 / 8).toFloat())
scale { x = (1 / Math.cos(TAU * 1 / 8) * -1).toFloat() }
}
private val cloud3 = cloud2 {
addTo = container
}
private fun init() {
cloud1.scale(0.3f)
cloud1.translate {
x = -84f; y = -28f
z = layerSpace * -1
}
cloud2.scale(0.4f)
cloud2.translate {
x = -38f; y = -22f
z = layerSpace * -0.5f
}
cloud3.scale(0.5f)
cloud3.translate {
x = 72f; y = -52f
z = layerSpace * -1
}
}
override fun onAttachTo(world: World, inDay: Boolean) {
world.addChild(container)
init()
cloud1.scaleTo(world, 1.0f).duration(1200).start()
cloud1.translateBy(world, y = -5f).duration(1200).toReverse().start()
cloud2.scaleTo(world, 1.0f).duration(1200).start()
cloud2.translateBy(world, y = 5f).delay(400).duration(1200).toReverse().start()
cloud3.scaleTo(world, 1.0f).duration(1200).start()
cloud3.translateBy(world, y=-5f).delay(600).duration(1200).toReverse().start()
}
override fun onDetachTo(world: World) {
cloud1.translateBy(world,x=-120f).duration(1200).onEnd {
container.remove()
}.start()
cloud2.translateBy(world,x=-120f).duration(1200).start()
cloud3.translateBy(world,x=120f).duration(1200).start()
}
} | 31.483871 | 87 | 0.589652 | 3.015625 |
74f7a9f85e2f12e783f94c8cb23678707be4a8b2 | 654 | asm | Assembly | NASM/SpojNASM/Basic/XOR.asm | gabriel88766/SmallProjects | deef95631369ac6378448ab3e8e1f52d7a78ea6b | [
"MIT"
] | 1 | 2021-08-28T17:22:57.000Z | 2021-08-28T17:22:57.000Z | NASM/SpojNASM/Basic/XOR.asm | gabriel88766/SmallProjects | deef95631369ac6378448ab3e8e1f52d7a78ea6b | [
"MIT"
] | null | null | null | NASM/SpojNASM/Basic/XOR.asm | gabriel88766/SmallProjects | deef95631369ac6378448ab3e8e1f52d7a78ea6b | [
"MIT"
] | null | null | null | ; https://www.spoj.com/problems/BSCXOR/
; Input X and Y, two numbers: "X Y"
; Output X^Y
;
global _start
section .data
section .bss ;Uninitialized data
line resb 3
resp resb 1
section .text
_start:
read1:
mov eax, 3
mov ebx, 0
mov ecx, line
mov edx, 3
int 80h
movzx eax, BYTE [line]
movzx ebx, BYTE [line+2]
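; For ASCII '0'/'1' inputs, (X + Y) mod 2 equals X XOR Y, so the
; remainder of the division by 2 below is already the answer bit.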
add eax,ebx
mov edx,0
mov ebx, 2
div ebx
add edx, '0'
mov [resp], edx
write:
mov ecx, resp
mov eax, 4
mov ebx, 1
mov edx, 1
int 80h
mov eax,1 ; 'exit' system call
mov ebx,0 ; exit with error code 0
int 80h
| 13.914894 | 46 | 0.54893 | 3.078125 |
c526affb7d5de98fb84232129dd552fa2ec5252a | 3,791 | asm | Assembly | Max/min.asm | miguel2192/CSC211-Assembly | 39cd43026f07e443d9809fb7ec229303522ecab9 | [
"MIT"
] | null | null | null | Max/min.asm | miguel2192/CSC211-Assembly | 39cd43026f07e443d9809fb7ec229303522ecab9 | [
"MIT"
] | null | null | null | Max/min.asm | miguel2192/CSC211-Assembly | 39cd43026f07e443d9809fb7ec229303522ecab9 | [
"MIT"
] | null | null | null | ########### max.asm ##############################
# Miguel Rodriguez #
# max.asm #
# Description #
# Determines the max of two floating numbers #
# Program Logic #
# 1. Ask for A #
# 2. Ask for B #
# 3. Compare values #
# 4. Display the greater value #
# 5. repeats the program using a while loop      #
# 6. return to operating system                  #
##################################################
.text
.globl __start
__start: # get the values into registers
la $a0,promptA # prompt user for A
li $v0,4 # print string
syscall
li $v0,6 # read single
syscall # $f0 <-- x
mov.s $f4,$f0 #move f0 to f4
la $a0,promptB # prompt user for A
li $v0,4 # print string
syscall
li $v0,6 # read single
syscall # $f0 <-- x
mov.s $f6,$f0 #move f0 to f4
c.lt.s $f4,$f6 # is A < B?
bc1t printA # yes -- print A
c.lt.s $f6,$f4 # is B < A?
bc1t printB # yes -- print B
la $a0,EQmsg # otherwise
li $v0,4 # they are equal
syscall
mov.s $f12,$f4 # print one of them
b prtnum
printA: la $a0,Amsg # message for A
li $v0,4
syscall
mov.s $f12,$f6 # print A
b prtnum
printB: la $a0,Bmsg # message for B
li $v0,4
syscall
mov.s $f12,$f4 # print B
prtnum: li $v0,2 # print single precision
# value in $f12
syscall
la $a0,newl
li $v0,4 # print new line
syscall
###############################################################################
# WHILE LOOP
# CREATES REPETITION
###############################################################################
# Ask the user if that user wishes to repeat this program: 1-yes, 0-no
la $a0,p4
li $v0,4
syscall
# Enter an integer (1 or 0)
la $v0,5 # Load address of the message area
syscall
# Compare input to 1
beqz $v0,eop
# if it is 1 repeat
j __start
eop: li $v0,10 # End Of Program
syscall # Call to system
.data
promptA: .asciiz "Enter A: "
promptB: .asciiz "Enter B: "
Amsg: .asciiz "B is Larger: "
Bmsg: .asciiz "A is Larger: "
EQmsg: .asciiz "They are equal: "
newl: .asciiz "\n"
p4: .asciiz "\nWould you like to repeat the program: (1-yes, 0-no)"
############## Output ###########################################################
# Console
# =========================
# Enter A: 4.5
# Enter B: 8.7
# B is Larger: 8.69999981
#
# Would you like to repeat the program: (1-yes, 0-no)1
# Enter A: 3.3
# Enter B: 7.7
# B is Larger: 7.69999981
#
# Would you like to repeat the program: (1-yes, 0-no)1
# Enter A: 8.8
# Enter B: 1.1
# A is Larger: 8.80000019
#
# Would you like to repeat the program: (1-yes, 0-no)0
#
#
################################################################################## | 29.617188 | 88 | 0.363756 | 3.125 |
1685774039ca58fd0124e74dc4bbc3c0a0d3b484 | 1,921 | h | C | include/tweedledum/IR/Qubit.h | paniash/tweedledum | fe997bea3413a02033d76b20034e3a24b840bffb | [
"MIT"
] | 76 | 2018-07-21T08:12:17.000Z | 2022-01-25T06:22:25.000Z | include/tweedledum/IR/Qubit.h | paniash/tweedledum | fe997bea3413a02033d76b20034e3a24b840bffb | [
"MIT"
] | 44 | 2018-10-26T10:44:39.000Z | 2022-02-07T01:07:38.000Z | include/tweedledum/IR/Qubit.h | paniash/tweedledum | fe997bea3413a02033d76b20034e3a24b840bffb | [
"MIT"
] | 23 | 2018-09-27T15:28:48.000Z | 2022-03-07T12:21:37.000Z | /*------------------------------------------------------------------------------
| Part of Tweedledum Project. This file is distributed under the MIT License.
| See accompanying file /LICENSE for details.
*-----------------------------------------------------------------------------*/
#pragma once
#include <cassert>
#include <limits>
#include <string>
#include <vector>
namespace tweedledum {
class Qubit {
public:
enum Polarity : uint32_t
{
positive = 0u,
negative = 1u
};
// Return the sentinel value
static constexpr Qubit invalid()
{
return Qubit();
}
constexpr Qubit(uint32_t uid, Polarity polarity = Polarity::positive)
: uid_(uid)
, polarity_(static_cast<uint32_t>(polarity))
{}
constexpr Qubit(Qubit const& other) = default;
Qubit& operator=(Qubit const& other)
{
data_ = other.data_;
return *this;
}
uint32_t uid() const
{
return uid_;
}
Polarity polarity() const
{
return static_cast<Polarity>(polarity_);
}
Qubit operator!() const
{
Qubit complemented(*this);
complemented.polarity_ ^= 1u;
return complemented;
}
Qubit operator+() const
{
return Qubit(uid_, Polarity::positive);
}
Qubit operator-() const
{
return Qubit(uid_, Polarity::negative);
}
bool operator==(Qubit other) const
{
return data_ == other.data_;
}
bool operator!=(Qubit other) const
{
return data_ != other.data_;
}
operator uint32_t() const
{
return uid_;
}
protected:
union {
uint32_t data_;
struct {
uint32_t const uid_ : 31;
uint32_t polarity_ : 1;
};
};
private:
constexpr Qubit()
: data_(std::numeric_limits<uint32_t>::max())
{}
};
} // namespace tweedledum
| 19.40404 | 80 | 0.52785 | 3.0625 |
c310a18d5accf2c0cabd35c4376b3dad1321920e | 1,034 | go | Go | content/code/go-iterate-all-elements/handleHtmlLink.go | eduard1963/userpages | 98a00c9ef3d82363dba1a89375574daaa5a86d14 | [
"Unlicense"
] | 77 | 2015-07-18T15:56:54.000Z | 2022-03-10T06:22:06.000Z | content/code/go-iterate-all-elements/handleHtmlLink.go | vicmanbrile/userpages | 76d540b447ea0baccc91e1db5f9c22408fe420f4 | [
"Unlicense"
] | 5 | 2017-02-23T10:17:21.000Z | 2022-01-13T19:51:15.000Z | content/code/go-iterate-all-elements/handleHtmlLink.go | vicmanbrile/userpages | 76d540b447ea0baccc91e1db5f9c22408fe420f4 | [
"Unlicense"
] | 35 | 2015-04-22T05:10:00.000Z | 2022-01-21T12:34:50.000Z | package main
import (
"errors"
"fmt"
"golang.org/x/net/html"
"os"
"strings"
)
func isAnchorElement(n *html.Node) bool {
return n.Type == html.ElementNode && n.Data == "a"
}
func isTextNode(n *html.Node) bool {
return n.Type == html.TextNode
}
func isHasOnlyOneChild(n *html.Node) bool {
return n.FirstChild != nil && n.FirstChild == n.LastChild
}
func getAttribute(n *html.Node, key string) (string, error) {
for _, attr := range n.Attr {
if attr.Key == key {
return attr.Val, nil
}
}
return "", errors.New(key + " not exist in attribute!")
}
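// printRstLink prints the anchor element as a reStructuredText inline link
// of the form `text <href>`__, writing any problems to stderr.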
func printRstLink(n *html.Node) {
if !isHasOnlyOneChild(n) {
fmt.Fprintf(os.Stderr, "Child number of anchor is not 1\n")
return
}
if !isTextNode(n.FirstChild) {
fmt.Fprintf(os.Stderr, "Child of anchor is not TextNode\n")
return
}
text := strings.TrimSpace(n.FirstChild.Data)
href, err := getAttribute(n, "href")
if err != nil {
fmt.Fprintf(os.Stderr, err.Error())
return
}
rstLink := "`" + text + " <" + href + ">`__"
fmt.Println(rstLink)
}
| 19.148148 | 61 | 0.648936 | 3.125 |
7e6f37b7f882863d339e0a504bceebce9b16dadb | 1,615 | lua | Lua | learn-corona-sdk-game-dev/chapter-11-code/listing_11-2.lua | portal-webstore/books | 689eb33ca0ef96d459e7cf3f3b15f35383b8c601 | [
"MIT"
] | null | null | null | learn-corona-sdk-game-dev/chapter-11-code/listing_11-2.lua | portal-webstore/books | 689eb33ca0ef96d459e7cf3f3b15f35383b8c601 | [
"MIT"
] | null | null | null | learn-corona-sdk-game-dev/chapter-11-code/listing_11-2.lua | portal-webstore/books | 689eb33ca0ef96d459e7cf3f3b15f35383b8c601 | [
"MIT"
] | null | null | null | function utils:showFPSAndMem()
local prevTime = 0;
local curTime = 0;
local dt = 0;
local fps = 60;
local mem = 0;
local frameCount = 0;
local avg = 0;
local slowest = 1000;
underlay = display.newRect(
0, display.contentHeight - 30, display.contentWidth, 34
);
underlay:setReferencePoint(display.TopLeftReferencePoint);
underlay:setFillColor(0, 0, 0, 128);
displayInfo = display.newText(
"FPS: ??, Avg: ?, Slowest: ?, Mem: ????mb", 0, 0, native.systemFontBold, 20
);
displayInfo.x = display.contentWidth / 2;
displayInfo.y = display.contentHeight - 14;
local function updateText()
curTime = system.getTimer();
dt = curTime - prevTime;
prevTime = curTime;
fps = math.floor(1000 / dt);
mem = system.getInfo("textureMemoryUsed") / 1000000;
if fps > 60 then
fps = 60
end
frameCount = frameCount + 1;
if frameCount > 150 then
avg = avg + fps;
if fps < slowest then
slowest = fps;
end
end
local a = math.round(avg / (frameCount - 150));
a = math.floor(a * math.pow(10, 0) + 0.5) / math.pow(10, 0);
collectgarbage();
local sysMem = collectgarbage("count") * 0.001;
sysMem = math.floor(sysMem * 1000) * 0.001;
displayInfo.text = "FPS: " .. fps .. ", Avg: " .. a ..
", Slowest: " .. slowest ..
", T-Mem: " .. string.sub(mem, 1, string.len(mem) - 4) .. "mb" ..
", S-Mem: " .. sysMem .. "mb";
underlay:toFront()
displayInfo:toFront()
end
underlay.isVisible = true;
displayInfo.isVisible = true;
Runtime:addEventListener("enterFrame", updateText)
end
| 29.907407 | 79 | 0.609288 | 3.15625 |
2f72ba28d9cb6ac5fe8a0b376754d94338a16de4 | 1,156 | rs | Rust | src/bin/parol/tools/follow.rs | jsinger67/parol | 80fd8e97d3659b3a3a0c6f87f8c089acd04e5f84 | [
"MIT"
] | 9 | 2021-11-04T00:57:09.000Z | 2022-03-09T16:15:21.000Z | src/bin/parol/tools/follow.rs | jsinger67/parol | 80fd8e97d3659b3a3a0c6f87f8c089acd04e5f84 | [
"MIT"
] | 10 | 2021-12-27T08:06:01.000Z | 2022-01-23T10:42:31.000Z | src/bin/parol/tools/follow.rs | jsinger67/parol | 80fd8e97d3659b3a3a0c6f87f8c089acd04e5f84 | [
"MIT"
] | 2 | 2021-12-30T05:34:04.000Z | 2022-01-22T17:52:24.000Z | use miette::{bail, Result};
use parol::analysis::follow_k;
use parol::analysis::FirstCache;
use parol::generators::generate_terminal_names;
use parol::{obtain_grammar_config, MAX_K};
use std::path::PathBuf;
/// Calculates the FOLLOW(k) sets for each non-terminal.
#[derive(clap::Parser)]
#[clap(name = "follow")]
pub struct Args {
/// The grammar file to use
#[clap(short = 'f', long = "grammar-file", parse(from_os_str))]
grammar_file: PathBuf,
/// The maximum number of lookahead tokens to be used
#[clap(short = 'k', long = "lookahead", default_value = "1")]
lookahead: usize,
}
pub fn main(args: &Args) -> Result<()> {
let file_name = &args.grammar_file;
let grammar_config = obtain_grammar_config(&file_name, true)?;
let max_k = args.lookahead;
if max_k > MAX_K {
bail!("Maximum lookahead is {}", MAX_K);
}
let terminals = generate_terminal_names(&grammar_config);
let first_cache = FirstCache::new();
let follow_k = follow_k(&grammar_config, max_k, &first_cache);
for (nt, fo) in follow_k.iter() {
println!(" {}: {}", nt, fo.to_string(&terminals));
}
Ok(())
}
| 30.421053 | 67 | 0.653979 | 3.015625 |
2104689fc7f2ffa86469a776372852ba26bb8302 | 1,857 | rs | Rust | crate/state_play/src/system/state_id_event_system.rs | Lighty0410/autexousious | 99d142d8fdbf2076f3fd929f61b8140d47cf6b86 | [
"Apache-2.0",
"MIT"
] | 41 | 2020-03-13T04:45:03.000Z | 2022-01-17T18:13:09.000Z | crate/state_play/src/system/state_id_event_system.rs | Lighty0410/autexousious | 99d142d8fdbf2076f3fd929f61b8140d47cf6b86 | [
"Apache-2.0",
"MIT"
] | 61 | 2016-06-19T01:28:12.000Z | 2021-07-17T08:21:44.000Z | crate/state_play/src/system/state_id_event_system.rs | Lighty0410/autexousious | 99d142d8fdbf2076f3fd929f61b8140d47cf6b86 | [
"Apache-2.0",
"MIT"
] | 3 | 2020-03-21T21:53:36.000Z | 2021-01-30T01:10:55.000Z | use amethyst::{
ecs::{ReadExpect, System, World, Write},
shred::{ResourceId, SystemData},
shrev::EventChannel,
};
use derivative::Derivative;
use derive_new::new;
use log::debug;
use state_registry::{StateId, StateIdUpdateEvent};
use tracker::Prev;
/// Emits `StateIdUpdateEvent`s when the `StateId` changes.
#[derive(Debug, Default, new)]
pub struct StateIdEventSystem;
/// `StateIdEventSystemData`.
#[derive(Derivative, SystemData)]
#[derivative(Debug)]
pub struct StateIdEventSystemData<'s> {
/// `StateId` resource.
#[derivative(Debug = "ignore")]
pub state_id: Option<ReadExpect<'s, StateId>>,
/// `Prev<StateId>` resource.
#[derivative(Debug = "ignore")]
pub state_id_prev: Option<ReadExpect<'s, Prev<StateId>>>,
/// `StateIdUpdateEvent` channel.
#[derivative(Debug = "ignore")]
pub state_id_update_ec: Write<'s, EventChannel<StateIdUpdateEvent>>,
}
impl<'s> System<'s> for StateIdEventSystem {
type SystemData = StateIdEventSystemData<'s>;
fn run(
&mut self,
StateIdEventSystemData {
state_id,
state_id_prev,
mut state_id_update_ec,
}: Self::SystemData,
) {
if let Some(state_id) = state_id {
let state_id = *state_id;
let state_id_prev = state_id_prev.map(|state_id_prev| **state_id_prev);
// Send event when `state_id_prev` is `None`, or when it differs from `state_id`.
if state_id_prev
.map(|state_id_prev| state_id != state_id_prev)
.unwrap_or(true)
{
let state_id_update_event = StateIdUpdateEvent::new(state_id, state_id_prev);
debug!("Sending event: {:?}", state_id_update_event);
state_id_update_ec.single_write(state_id_update_event);
}
}
}
}
| 32.017241 | 93 | 0.632741 | 3.09375 |
feb4363a02c7a9a4c0bb028597ed04facbbde4bf | 4,646 | lua | Lua | tabuleiro.lua | Mario-Nakazato/jogoVelhaMultiplayer | 00a5583b8223cdbef79dce5955a52d027fadae6d | [
"Apache-2.0"
] | 1 | 2022-02-26T03:37:02.000Z | 2022-02-26T03:37:02.000Z | tabuleiro.lua | Mario-Nakazato/jogoVelhaMultiplayer | 00a5583b8223cdbef79dce5955a52d027fadae6d | [
"Apache-2.0"
] | null | null | null | tabuleiro.lua | Mario-Nakazato/jogoVelhaMultiplayer | 00a5583b8223cdbef79dce5955a52d027fadae6d | [
"Apache-2.0"
] | null | null | null | --[[
Tabuleiro
# .
--]] require "palavra"
local function novo(x, y, tamanho, jogador)
local tabuleiro = {
x = x or 0,
y = y or 0,
tamanho = tamanho or 128,
jogador = jogador or "X",
jogada = 9,
venceu = nil,
quadrado = {
[1] = {"", "", ""},
[2] = {"", "", ""},
[3] = {"", "", ""}
}
}
tabuleiro.fonte = lgrafico.newFont(tabuleiro.tamanho)
function tabuleiro:load()
end
function tabuleiro:update(dt)
end
function tabuleiro:draw()
lgrafico.setLineWidth(self.tamanho / 32)
lgrafico.line(self.x, self.y + self.tamanho, self.x + self.tamanho * 3, self.y + self.tamanho)
lgrafico.line(self.x, self.y + self.tamanho * 2, self.x + self.tamanho * 3, self.y + self.tamanho * 2)
lgrafico.line(self.x + self.tamanho, self.y, self.x + self.tamanho, self.y + self.tamanho * 3)
lgrafico.line(self.x + self.tamanho * 2, self.y, self.x + self.tamanho * 2, self.y + self.tamanho * 3)
for i = 0, 2 do
for j = 0, 2 do
lgrafico.print(self.quadrado[i + 1][j + 1], self.fonte, self.x + self.tamanho * j + self.tamanho / 8,
self.y + self.tamanho * i - self.tamanho / 16)
end
end
--[[
lgrafico.rectangle("line", self.x, self.y -self.tamanho, self.tamanho *3, self.tamanho)
lgrafico.rectangle("line", self.x, self.y +self.tamanho *3, self.tamanho *3, self.tamanho)
lgrafico.print("Mário X", lgrafico.newFont(32), self.x +self.tamanho /8, self.y -self.tamanho +self.tamanho /16)
if self.venceu == "X" or self.venceu == "O" then
lgrafico.print("Vencedor: " ..tostring(self.venceu), lgrafico.newFont(32), self.x +self.tamanho /8, self.y -self.tamanho /2 +self.tamanho /16)
elseif self.venceu == "Velha" then
lgrafico.print(self.venceu, lgrafico.newFont(32), self.x +self.tamanho +self.tamanho /8, self.y -self.tamanho /2 +self.tamanho /16)
end
lgrafico.print("Bruno O", lgrafico.newFont(32), self.x +self.tamanho *2 -self.tamanho /8, self.y -self.tamanho +self.tamanho /16)
]] --
end
function tabuleiro:jogar(i, j, p)
if i and j and self.jogada > 0 and self.quadrado[i][j] == "" then
self.jogada = self.jogada - 1
self.quadrado[i][j] = p or self.jogador
self.jogador = self.jogador == "X" and "O" or "X"
end
return i, j, self:checar()
end
function tabuleiro:selecionar(x, y)
for j = 1, 3 do
for i = 1, 3 do
if self.x + self.tamanho * (j - 1) < x and self.x + self.tamanho * j > x then
if self.y + self.tamanho * (i - 1) < y and self.y + self.tamanho * i > y then
return i, j
end
end
end
end
end
function tabuleiro:checar()
-- self.venceu = nil
        for i = 1, 3 do -- Horizontal and Vertical
if self.quadrado[i][1] ~= "" and self.quadrado[i][1] == self.quadrado[i][2] and self.quadrado[i][1] ==
self.quadrado[i][3] then
self.venceu = self.quadrado[i][1]
break
elseif self.quadrado[1][i] ~= "" and self.quadrado[1][i] == self.quadrado[2][i] and self.quadrado[1][i] ==
self.quadrado[3][i] then
self.venceu = self.quadrado[1][i]
break
end
end
-- Diagonal
if self.quadrado[1][1] ~= "" and self.quadrado[1][1] == self.quadrado[2][2] and self.quadrado[1][1] ==
self.quadrado[3][3] then
self.venceu = self.quadrado[1][1];
elseif self.quadrado[3][1] ~= "" and self.quadrado[3][1] == self.quadrado[2][2] and self.quadrado[3][1] ==
self.quadrado[1][3] then
self.venceu = self.quadrado[3][1];
end
if self.venceu then
self.jogada = 0
end
if self.venceu == nil and self.jogada == 0 then
self.venceu = "Velha"
end
return self.venceu
end
function tabuleiro:mousepressed(x, y, botao, toque, repeticao)
--print(x,y,botao,toque,repeticao)
if self.jogada > 0 then
-- p = botao == 1 and "X" or botao == 2 and "O" or ""
i, j = self:selecionar(x, y)
end
return self:jogar(i, j, p)
end
return tabuleiro
end
tabuleiro = {
novo = novo
}
print("tabuleiro.lua")
return tabuleiro
| 31.821918 | 158 | 0.522815 | 3 |
e731212e08145b2ed50c248d854bb965f757e62d | 1,773 | js | JavaScript | src/services/metric.js | media-network/media-api | 292fbf8eb2c490dd2c39d376979b70f0dbde3ac5 | [
"MIT"
] | 1 | 2020-05-18T09:45:29.000Z | 2020-05-18T09:45:29.000Z | src/services/metric.js | media-network/media-api | 292fbf8eb2c490dd2c39d376979b70f0dbde3ac5 | [
"MIT"
] | null | null | null | src/services/metric.js | media-network/media-api | 292fbf8eb2c490dd2c39d376979b70f0dbde3ac5 | [
"MIT"
] | 1 | 2022-01-03T07:15:40.000Z | 2022-01-03T07:15:40.000Z | import elasticsearchService from 'services/elasticsearch'
import elasticsearchInfra from 'infrastructure/elasticsearch'
import config from 'infrastructure/config'
import mapping from 'mapping/metric'
const DATAPOINT_VERSION = config.elasticsearch.datapointVersion
const head = async (projectIdentifier, metricName, timestamp) => {
return await elasticsearchService.head(
`${ DATAPOINT_VERSION }-${ projectIdentifier }-${ metricName }`,
metricName,
timestamp
)
}
const update = async (projectIdentifier, metricName, data) => {
if (!projectIdentifier || !metricName) {
return null
}
const result = await data.map(async ({ timestamp, value }) => {
return await elasticsearchService.create(
`${ DATAPOINT_VERSION }-${ projectIdentifier }-${ metricName }`,
metricName,
timestamp,
mapping,
{ timestamp: new Date(timestamp), value }
)
})
if (!result) {
return null
}
return data
}
const get = async (projectIdentifier, metricName, data) => {
if (!projectIdentifier || !metricName) {
return null
}
const { startTime, endTime, period, from, size = 500 } = data
const { hits } = await elasticsearchInfra.searchWithParams(
`${ DATAPOINT_VERSION }-${ projectIdentifier }-${ metricName }`,
metricName,
{
bool: {
must: {
range: {
timestamp: {
gte: Date.parse(startTime),
lte: Date.parse(endTime)
}
}
}
}
},
{
from,
size
}
)
if (!hits || !hits.hits.length) {
return {
listData: [],
total: 0
}
}
return {
listData: hits.hits,
total: hits.total
}
}
export default {
get,
update,
head
}
| 21.107143 | 70 | 0.602933 | 3.15625 |
f031c64cd48b598cd3b616708c05819e454b8bc1 | 2,870 | py | Python | core/translator.py | bfu4/mdis | fac5ec078ffeaa9339df4b31b9b71140563f4f14 | [
"MIT"
] | 13 | 2021-05-17T06:38:50.000Z | 2022-03-27T15:39:57.000Z | core/translator.py | bfu4/mdis | fac5ec078ffeaa9339df4b31b9b71140563f4f14 | [
"MIT"
] | null | null | null | core/translator.py | bfu4/mdis | fac5ec078ffeaa9339df4b31b9b71140563f4f14 | [
"MIT"
] | null | null | null | from typing import List
from parser import parse_bytes, split_bytes_from_lines, get_bytes, parse_instruction_set, wrap_parsed_set
from reader import dump_file_hex_with_locs
class Translator:
"""
Class handling file translations from *.mpy to hex dumps and opcodes
"""
def __init__(self, file: str):
"""
Create new translator
:param file: location of the file
"""
self.file = file
def get_file_hex(self):
"""
Get a full hex dump of the file
:return:
"""
return dump_file_hex_with_locs(self.file)
def get_file_hex_at(self, _from: str, _to: str):
"""
Get a byte dump at a specified location
:param _from: from address
:param _to: to address
:return: bytes from address {_from} to address {_to}
"""
return parse_bytes(self.get_file_hex(), _from, _to)
def get_file(self):
"""
Get the file name
:return:
"""
return self.file
def get_magic(self) -> str:
"""
Get the magic number
:return:
"""
return "".join(self.get_all_bytes()[0][:8])
def get_all_bytes(self):
"""
Get all of the bytes
:return: all of the bytes
"""
return get_bytes(self.get_file_hex().split("\n"))
def get_split_bytes(self) -> List[List[str]]:
"""
Get all of the bytes per line
:return: bytes in list form
"""
split = split_bytes_from_lines(self.get_all_bytes())
split[0] = split[0][4:]
return split
def get_bytes_at(self, _from: str, _to: str) -> List[List[str]]:
"""
Get the bytes between the specified locations
:param _from: start address
:param _to: end address
:return: bytes
"""
return split_bytes_from_lines(self.get_file_hex_at(_from, _to))
def get_instruction_set(self) -> List[str]:
"""
Get the file's instruction set
:return: set
"""
bl = self.get_split_bytes()
# offset of 8, start at first BC_BASE_RESERVED
list_with_offset = bl[0][4:]
_bytes = self.__flatten([list_with_offset, bl[1]])
_set = parse_instruction_set(_bytes)
return wrap_parsed_set(_set)
def get_instructions_at(self, _from: str, _to: str) -> List[str]:
"""
Get the instructions between addresses
:param _from: start address
:param _to: end address
:return: instructions
"""
_bytes = self.__flatten(self.get_bytes_at(_from, _to))
_set = parse_instruction_set(_bytes)
return wrap_parsed_set(_set)
def __flatten(self, _list):
# Lambda replaced by def flatten due to E731
return [item for sublist in _list for item in sublist]
| 28.7 | 105 | 0.591289 | 3.40625 |
643b6b320d6dcee16060090daaeb32870d38900b | 2,905 | rs | Rust | src/lib.rs | KJ002/anagram_solver | 5c8c86fb19211d414acf9593fb4ceb6e2c2d9930 | [
"MIT"
] | null | null | null | src/lib.rs | KJ002/anagram_solver | 5c8c86fb19211d414acf9593fb4ceb6e2c2d9930 | [
"MIT"
] | null | null | null | src/lib.rs | KJ002/anagram_solver | 5c8c86fb19211d414acf9593fb4ceb6e2c2d9930 | [
"MIT"
] | null | null | null | use itertools::Itertools;
use std::cmp::{Ordering, Reverse};
use std::fs;
use std::thread;
use pyo3::prelude::*;
fn contains_any_characters(word: &str, characters: Vec<char>) -> bool {
for character in characters {
if word
.to_lowercase()
.contains(&character.to_lowercase().to_string())
{
return true;
}
}
false
}
fn binary_search(word: &str, words: &[String]) -> bool {
if words.len() <= 20 {
return words.iter().any(|x| x == word);
}
let centre_index = (words.len() - 1) / 2;
if word == words[centre_index] {
return true;
}
match word.cmp(&words[centre_index]) {
Ordering::Greater => binary_search(word, &words[centre_index..]),
Ordering::Less => binary_search(word, &words[..centre_index]),
_ => panic!(),
}
}
fn all_lengths(anagram: &str, max: &usize, min: &usize) -> Vec<Vec<char>> {
if *max <= *min {
return anagram.chars().permutations(*max).unique().collect_vec();
}
let mut result: Vec<Vec<char>> = Vec::new();
result.append(&mut anagram.chars().permutations(*max).unique().collect_vec());
result.append(&mut all_lengths(anagram, &(max - 1), &min));
result
}
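// Splits the permutation work across threads: lengths up to 6 are generated
// together in one thread, and each length from 7 to `max` gets its own thread.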
fn threader(anagram: &str, max: usize, min: usize) -> Vec<Vec<char>> {
let mut handles = vec![];
{
let max = if max > 6 {
6
} else {
max.clone()
};
let anagram = anagram.to_string();
let handle = thread::spawn(move || {
all_lengths(&anagram, &max, &min)
});
handles.push(handle);
}
for n in 7..max+1 {
let anagram = anagram.to_string();
let handle = thread::spawn(move || {
all_lengths(&anagram, &n, &n)
});
handles.push(handle);
}
let mut result = vec![];
for handle in handles {
result.append(&mut handle.join().unwrap());
}
result
}
#[pyfunction]
fn solve_anagram(anagram: &str, max: usize, min: usize) -> PyResult<Vec<String>>{
let letters: Vec<Vec<char>> = threader(&anagram, max, min);
let words: Vec<String> = fs::read_to_string("words.txt")
.expect("Couldn't open words.txt. Does it exist?")
.split('\n')
.map(String::from)
.collect();
let mut solved: Vec<String> = Vec::new();
for perm in letters {
let result = perm.into_iter().collect::<String>();
if contains_any_characters(&result, vec!['a', 'e', 'i', 'o', 'y'])
&& !solved.iter().any(|x| x == &result)
&& binary_search(&result, &words)
{
solved.push(result);
}
}
solved.sort_by_key(|a| Reverse(a.len()));
Ok(solved)
}
#[pymodule]
fn anagram_solver(_py: Python, m: &PyModule) -> PyResult<()> {
m.add_function(wrap_pyfunction!(solve_anagram, m)?)?;
Ok(())
}
| 24.008264 | 82 | 0.549053 | 3.296875 |
e725ca8b93021d84822bf6d286bdba3a6bcfd6e6 | 3,120 | js | JavaScript | app/components/Player/PlayerComponent.js | popcorn-official/popcorn-time-desktop | 2dcdc61d0d22ecc6f46d85bd61b6f0c55b6b0d32 | [
"MIT"
] | 9 | 2020-03-28T14:21:31.000Z | 2021-07-30T22:22:00.000Z | app/components/Player/PlayerComponent.js | TriPSs/popcorn-time-desktop | 2dcdc61d0d22ecc6f46d85bd61b6f0c55b6b0d32 | [
"MIT"
] | 21 | 2017-07-10T08:12:09.000Z | 2017-08-18T12:22:47.000Z | app/components/Player/PlayerComponent.js | popcorn-official/popcorn-time-desktop | 2dcdc61d0d22ecc6f46d85bd61b6f0c55b6b0d32 | [
"MIT"
] | 10 | 2020-05-06T07:43:32.000Z | 2022-01-14T16:49:49.000Z | // @flow
import React from 'react'
import classNames from 'classnames'
import Player from 'api/Player'
import * as PlayerConstants from 'api/Player/PlayerConstants'
import * as TorrentConstants from 'api/Torrent/TorrentConstants'
import type { Props } from './PlayerTypes'
import classes from './Player.scss'
import Stats from './Stats'
import Controls from './Controls'
import Progress from './Progress'
export default class extends React.Component {
props: Props
componentWillUnmount() {
Player.destroy()
}
isHidden = () => {
const { torrentStatus } = this.props
if (this.shouldShowPlayer()) {
return false
}
return torrentStatus === TorrentConstants.STATUS_NONE
}
shouldShowPlayer = () => {
const { playerStatus, playerAction } = this.props
return playerStatus !== PlayerConstants.STATUS_NONE
&& playerStatus !== PlayerConstants.STATUS_ENDED
&& playerAction !== PlayerConstants.ACTION_STOP
}
shouldShowControls = () => {
const { playerProvider, playerStatus } = this.props
if (playerProvider === PlayerConstants.PROVIDER_PLYR) {
return false
}
return playerStatus === PlayerConstants.STATUS_PLAYING || playerStatus === PlayerConstants.STATUS_PAUSED
}
renderVideo = () => {
const { uri, stop, playerStatus } = this.props
return (
<div
style={{
position : this.shouldShowPlayer() ? 'fixed' : 'inherit',
visibility: this.shouldShowPlayer() ? 'inherit' : 'hidden',
display : uri ? 'inherit' : 'none',
}}
className={classNames(classes.plyr, {
[classes['plyr--playing']]: playerStatus === PlayerConstants.STATUS_PLAYING,
[classes['plyr--paused']] : playerStatus === PlayerConstants.STATUS_PAUSED,
})}>
<button
className={classNames(
classes.player__close,
'pct-btn pct-btn-trans pct-btn-outline pct-btn-round')}
onClick={stop}>
<i className={'ion-ios-arrow-back'} />
Close
</button>
<video controls>
<track kind={'captions'} />
</video>
</div>
)
}
render() {
const { playerProvider, playerStatus } = this.props
const { stop, torrentStatus } = this.props
return (
<div
className={classNames({
'animated fadeIn' : !this.isHidden(),
[classes['player--hidden']]: this.isHidden(),
}, classes.player)}>
{torrentStatus !== TorrentConstants.STATUS_NONE && (
<Stats {...{
playerProvider,
playerStatus,
torrentStatus,
stop,
}} />
)}
<div className={classNames(classes.player__controls, {
'animated fadeIn': !this.isHidden(),
})}>
{this.shouldShowControls() && (
<Controls />
)}
{this.shouldShowControls() && (
<Progress />
)}
</div>
{playerProvider === PlayerConstants.PROVIDER_PLYR && this.renderVideo()}
</div>
)
}
}
| 26.218487 | 108 | 0.583333 | 3.171875 |
c3dfda3fd9302489e3607feef4f356f8a71fa4c0 | 12,508 | go | Go | session_handshake.go | moroen/dtls | 8868b17f6698d0759906307ebf0cb0aca15ff499 | [
"MIT"
] | null | null | null | session_handshake.go | moroen/dtls | 8868b17f6698d0759906307ebf0cb0aca15ff499 | [
"MIT"
] | null | null | null | session_handshake.go | moroen/dtls | 8868b17f6698d0759906307ebf0cb0aca15ff499 | [
"MIT"
] | null | null | null | package dtls
import (
"bytes"
"encoding/hex"
"errors"
"reflect"
"time"
)
func (s *session) parseRecord(data []byte) (*record, []byte, error) {
rec, rem, err := parseRecord(data)
if err != nil {
logWarn(s.peer.String(), "dtls: parse record: %s", err.Error())
return nil, nil, err
}
if s.decrypt {
if s.KeyBlock == nil {
logWarn(s.peer.String(), "dtls: tried to decrypt but KeyBlock not initialized.")
return nil, nil, errors.New("dtls: key block not initialized")
}
if len(rec.Data) < 8 {
if rec.IsAlert() {
// we were expecting encryption, but received an unencrypted alert message.
logDebug(s.peer.String(), "dtls: read %s (rem:%d) (decrypted:not-applicable-alert)", rec.Print(), len(rem))
return rec, rem, nil
} else {
logWarn(s.peer.String(), "dtls: data underflow, expected at least 8 bytes, but received %d.", len(rec.Data))
return nil, nil, errors.New("dtls: data underflow, expected at least 8 bytes")
}
}
var iv []byte
var key []byte
if s.Type == SessionType_Client {
iv = s.KeyBlock.ServerIV
key = s.KeyBlock.ServerWriteKey
} else {
iv = s.KeyBlock.ClientIV
key = s.KeyBlock.ClientWriteKey
}
nonce := newNonceFromBytes(iv, rec.Data[:8])
aad := newAad(rec.Epoch, rec.Sequence, uint8(rec.ContentType), uint16(len(rec.Data)-16))
clearText, err := dataDecrypt(rec.Data[8:], nonce, key, aad, s.peer.String())
if err != nil {
if s.handshake.firstDecrypt {
//callback that psk is invalid
logWarn(s.peer.String(), "dtls: PSK is most likely invalid for identity: %s%s", s.Server.Identity, s.Client.Identity)
s.handshake.firstDecrypt = false
}
if rec.IsHandshake() {
logDebug(s.peer.String(), "dtls: read %s (rem:%d) (decrypted:not-applicable): %s", rec.Print(), len(rem), err.Error())
return rec, rem, nil
} else {
logWarn(s.peer.String(), "dtls: read decryption error: %s", err.Error())
return nil, nil, err
}
}
if s.handshake.firstDecrypt {
s.handshake.firstDecrypt = false
}
rec.SetData(clearText)
}
logDebug(s.peer.String(), "dtls: read %s (rem:%d) (decrypted:%t)", rec.Print(), len(rem), s.decrypt)
return rec, rem, nil
}
func (s *session) parseHandshake(data []byte) (*handshake, error) {
hs, err := parseHandshake(data)
s.updateHash(data)
if err != nil {
return nil, err
}
logDebug(s.peer.String(), "dtls: read %s", hs.Print())
return hs, err
}
func (s *session) writeHandshake(hs *handshake) error {
hs.Header.Sequence = s.handshake.seq
s.handshake.seq += 1
rec := newRecord(ContentType_Handshake, s.getEpoch(), s.getNextSequence(), hs.Bytes())
s.updateHash(rec.Data)
logDebug(s.peer.String(), "dtls: write (handshake) %s", hs.Print())
return s.writeRecord(rec)
}
func (s *session) writeHandshakes(hss []*handshake) error {
recs := make([]*record, len(hss))
for idx, hs := range hss {
hs.Header.Sequence = s.handshake.seq
s.handshake.seq += 1
rec := newRecord(ContentType_Handshake, s.getEpoch(), s.getNextSequence(), hs.Bytes())
s.updateHash(rec.Data)
logDebug(s.peer.String(), "dtls: write (handshake) %s", hs.Print())
recs[idx] = rec
}
return s.writeRecords(recs)
}
func (s *session) writeRecord(rec *record) error {
if s.encrypt {
var iv []byte
var key []byte
if s.Type == SessionType_Client {
iv = s.KeyBlock.ClientIV
key = s.KeyBlock.ClientWriteKey
} else {
iv = s.KeyBlock.ServerIV
key = s.KeyBlock.ServerWriteKey
}
nonce := newNonce(iv, rec.Epoch, rec.Sequence)
aad := newAad(rec.Epoch, rec.Sequence, uint8(rec.ContentType), uint16(len(rec.Data)))
cipherText, err := dataEncrypt(rec.Data, nonce, key, aad, s.peer.String())
if err != nil {
return err
}
w := newByteWriter()
w.PutUint16(rec.Epoch)
w.PutUint48(rec.Sequence)
w.PutBytes(cipherText)
rec.SetData(w.Bytes())
logDebug(s.peer.String(), "dtls: write (encrptyed) %s", rec.Print())
return s.peer.WritePacket(rec.Bytes())
} else {
logDebug(s.peer.String(), "dtls: write (unencrypted) %s", rec.Print())
return s.peer.WritePacket(rec.Bytes())
}
}
func (s *session) writeRecords(recs []*record) error {
if s.encrypt {
return errors.New("dtls: can't write multiple encrypted records.")
} else {
buf := bytes.Buffer{}
for _, rec := range recs {
logDebug(s.peer.String(), "dtls: write (unencrypted) %s", rec.Print())
buf.Write(rec.Bytes())
}
return s.peer.WritePacket(buf.Bytes())
}
}
func (s *session) generateCookie() {
s.handshake.cookie = randomBytes(16)
}
func (s *session) startHandshake() error {
reqHs := newHandshake(handshakeType_ClientHello)
reqHs.ClientHello.Init(s.Id, s.Client.Random, nil, s.cipherSuites, s.compressionMethods)
err := s.writeHandshake(reqHs)
if err != nil {
return err
}
return nil
}
func (s *session) waitForHandshake(timeout time.Duration) error {
if s.handshake.done == nil {
return errors.New("dtls: handshake not in-progress")
}
select {
case err := <-s.handshake.done:
if s.handshake.state == "finished" {
return nil
} else {
return err
}
case <-time.After(timeout):
return errors.New("dtls: timed out waiting for handshake to complete")
}
return errors.New("dtls: unknown wait error")
}
func (s *session) processHandshakePacket(rspRec *record) error {
var reqHs, rspHs *handshake
var err error
switch rspRec.ContentType {
case ContentType_Handshake:
rspHs, err = s.parseHandshake(rspRec.Data)
if err != nil {
return err
}
if s.isHandshakeDone() && rspHs.Header.HandshakeType != handshakeType_ClientHello {
return errors.New("dtls: handshake packet received after handshake is complete")
}
switch rspHs.Header.HandshakeType {
case handshakeType_ClientHello:
cookie := rspHs.ClientHello.GetCookie()
if len(cookie) == 0 {
s.reset()
s.generateCookie()
s.sequenceNumber = uint64(rspHs.Header.Sequence)
s.handshake.seq = rspHs.Header.Sequence
s.handshake.state = "recv-clienthello-initial"
s.started = time.Now()
} else {
if !reflect.DeepEqual(cookie, s.handshake.cookie) {
s.handshake.state = "failed"
err = errors.New("dtls: cookie in clienthello does not match")
break
}
s.Client.RandomTime, s.Client.Random = rspHs.ClientHello.GetRandom()
if rspHs.ClientHello.HasSessionId() {
//resuming a session
s.Client.Identity = getIdentityFromCache(rspHs.ClientHello.GetSessionIdStr())
if len(s.Client.Identity) > 0 {
s.Id = rspHs.ClientHello.GetSessionId()
logDebug(s.peer.String(), "dtls: resuming previously established session, set identity: %s", s.Client.Identity)
s.resumed = true
psk := GetPskFromKeystore(s.Client.Identity, s.peer.String())
if psk == nil {
err = errors.New("dtls: no valid psk for identity")
break
}
s.Psk = psk
s.initKeyBlock()
} else {
logDebug(s.peer.String(), "dtls: tried to resume session, but it was not found")
s.resumed = false
}
} else {
s.resumed = false
}
s.handshake.state = "recv-clienthello"
}
case handshakeType_HelloVerifyRequest:
if len(s.handshake.cookie) == 0 {
s.handshake.cookie = rspHs.HelloVerifyRequest.GetCookie()
s.resetHash()
s.handshake.state = "recv-helloverifyrequest"
} else {
s.handshake.state = "failed"
err = errors.New("dtls: received hello verify request, but already have cookie")
break
}
s.handshake.state = "recv-helloverifyrequest"
case handshakeType_ServerHello:
s.Server.RandomTime, s.Server.Random = rspHs.ServerHello.GetRandom()
if reflect.DeepEqual(s.Id, rspHs.ServerHello.GetSessionId()) {
//resuming session
s.resumed = true
} else {
s.Id = rspHs.ServerHello.GetSessionId()
}
s.handshake.state = "recv-serverhello"
case handshakeType_ClientKeyExchange:
s.Client.Identity = string(rspHs.ClientKeyExchange.GetIdentity())
psk := GetPskFromKeystore(s.Client.Identity, s.peer.String())
if psk == nil {
err = errors.New("dtls: no valid psk for identity")
break
}
s.Psk = psk
s.initKeyBlock()
s.handshake.state = "recv-clientkeyexchange"
//TODO fail here if identity isn't found
case handshakeType_ServerKeyExchange:
s.Server.Identity = string(rspHs.ServerKeyExchange.GetIdentity())
s.handshake.state = "recv-serverkeyexchange"
case handshakeType_ServerHelloDone:
s.handshake.state = "recv-serverhellodone"
case handshakeType_Finished:
var label string
if s.Type == SessionType_Client {
label = "server"
} else {
label = "client"
}
if rspHs.Finished.Match(s.KeyBlock.MasterSecret, s.handshake.savedHash, label) {
if s.Type == SessionType_Server {
setIdentityToCache(hex.EncodeToString(s.Id), s.Client.Identity)
}
logDebug(s.peer.String(), "dtls: encryption matches, handshake complete")
} else {
s.handshake.state = "failed"
err = errors.New("dtls: crypto verification failed")
break
}
s.handshake.state = "finished"
break
default:
logWarn(s.peer.String(), "dtls: invalid handshake type [%v] received", rspRec.ContentType)
err = errors.New("dtls: bad handshake type")
break
}
case ContentType_ChangeCipherSpec:
s.decrypt = true
s.handshake.firstDecrypt = true
s.handshake.savedHash = s.getHash()
s.handshake.state = "cipherchangespec"
}
if err == nil {
switch s.handshake.state {
case "recv-clienthello-initial":
reqHs = newHandshake(handshakeType_HelloVerifyRequest)
reqHs.HelloVerifyRequest.Init(s.handshake.cookie)
err = s.writeHandshake(reqHs)
if err != nil {
break
}
s.resetHash()
case "recv-clienthello":
//TODO consider adding serverkeyexchange, not sure what to recommend as a server identity
reqHs = newHandshake(handshakeType_ServerHello)
reqHs.ServerHello.Init(s.Server.Random, s.Id)
reqHs2 := newHandshake(handshakeType_ServerHelloDone)
reqHs2.ServerHelloDone.Init()
err = s.writeHandshakes([]*handshake{reqHs, reqHs2})
if err != nil {
break
}
case "recv-helloverifyrequest":
reqHs = newHandshake(handshakeType_ClientHello)
err = reqHs.ClientHello.Init(s.Id, s.Client.Random, s.handshake.cookie, s.cipherSuites, s.compressionMethods)
if err != nil {
break
}
err = s.writeHandshake(reqHs)
if err != nil {
break
}
case "recv-serverhellodone":
if len(s.Server.Identity) > 0 {
psk := GetPskFromKeystore(s.Server.Identity, s.peer.String())
if len(psk) > 0 {
s.Client.Identity = s.Server.Identity
s.Psk = psk
}
}
if len(s.Psk) == 0 {
psk := GetPskFromKeystore(s.Client.Identity, s.peer.String())
if len(psk) > 0 {
s.Psk = psk
} else {
err = errors.New("dtls: no psk could be found")
break
}
}
if !s.resumed {
reqHs = newHandshake(handshakeType_ClientKeyExchange)
reqHs.ClientKeyExchange.Init([]byte(s.Client.Identity))
err = s.writeHandshake(reqHs)
if err != nil {
break
}
}
s.initKeyBlock()
rec := newRecord(ContentType_ChangeCipherSpec, s.getEpoch(), s.getNextSequence(), []byte{0x01})
s.incEpoch()
err = s.writeRecord(rec)
if err != nil {
break
}
s.encrypt = true
reqHs = newHandshake(handshakeType_Finished)
reqHs.Finished.Init(s.KeyBlock.MasterSecret, s.getHash(), "client")
err = s.writeHandshake(reqHs)
if err != nil {
break
}
case "finished":
if s.Type == SessionType_Server {
rec := newRecord(ContentType_ChangeCipherSpec, s.getEpoch(), s.getNextSequence(), []byte{0x01})
s.incEpoch()
err = s.writeRecord(rec)
if err != nil {
break
}
s.encrypt = true
reqHs = newHandshake(handshakeType_Finished)
reqHs.Finished.Init(s.KeyBlock.MasterSecret, s.getHash(), "server")
err = s.writeHandshake(reqHs)
if err != nil {
break
}
}
}
}
if err != nil {
s.handshake.state = "failed"
s.handshake.err = err
if HandshakeCompleteCallback != nil {
HandshakeCompleteCallback(s.peer.String(), s.Client.Identity, time.Now().Sub(s.started), err)
}
FORERR:
for {
select {
case s.handshake.done <- err:
continue
default:
break FORERR
}
}
return err
} else {
s.handshake.err = nil
}
if s.handshake.state == "finished" {
if HandshakeCompleteCallback != nil {
HandshakeCompleteCallback(s.peer.String(), s.Client.Identity, time.Now().Sub(s.started), nil)
}
FORFIN:
for {
select {
case s.handshake.done <- nil:
continue
default:
break FORFIN
}
}
}
return nil
}
| 28.107865 | 122 | 0.669012 | 3.046875 |
0b28b47566a0388433df755a312dddf760b4c430 | 1,250 | py | Python | onebarangay_psql/users/tests/test_admin.py | PrynsTag/oneBarangay-PostgreSQL | 11d7b97b57603f4c88948905560a22a5314409ce | [
"Apache-2.0"
] | null | null | null | onebarangay_psql/users/tests/test_admin.py | PrynsTag/oneBarangay-PostgreSQL | 11d7b97b57603f4c88948905560a22a5314409ce | [
"Apache-2.0"
] | 43 | 2022-02-07T00:18:35.000Z | 2022-03-21T04:42:48.000Z | onebarangay_psql/users/tests/test_admin.py | PrynsTag/oneBarangay-PostgreSQL | 11d7b97b57603f4c88948905560a22a5314409ce | [
"Apache-2.0"
] | null | null | null | """Create your tests for the admin app here."""
import pytest
from django.contrib.auth import get_user_model
from django.urls import reverse
pytestmark = pytest.mark.django_db
User = get_user_model()
class TestUserAdmin:
"""Test the admin interface."""
def test_changelist(self, admin_client):
"""Test the changelist view."""
url = reverse("admin:users_user_changelist")
response = admin_client.get(url)
assert response.status_code == 200
def test_search(self, admin_client):
"""Test the search functionality."""
url = reverse("admin:users_user_changelist")
response = admin_client.get(url, data={"q": "test"})
assert response.status_code == 200
def test_add(self, admin_client):
"""Test the add user functionality."""
url = reverse("admin:users_user_add")
response = admin_client.get(url)
assert response.status_code == 200
def test_view_user(self, admin_client):
"""Test the view user functionality."""
user = User.objects.get(username="admin")
url = reverse("admin:users_user_change", kwargs={"object_id": user.pk})
response = admin_client.get(url)
assert response.status_code == 200
| 33.783784 | 79 | 0.668 | 3.125 |
650d0ad17e404144a026ce3f06aafc17ea1fda8f | 1,962 | py | Python | sparse gamma def/gamma_def_score.py | blei-lab/ars-reparameterization | b20a84c28537d85e0aaf62cbbaacb6de9370f0a3 | [
"MIT"
] | 33 | 2017-03-11T10:00:32.000Z | 2022-03-08T14:23:45.000Z | ars-reparameterization/sparse gamma def/gamma_def_score.py | astirn/neural-inverse-cdf-sampling | 80eb2eb7cf396a4e53df62bc126e9a1828f55ca9 | [
"MIT"
] | 2 | 2018-02-05T17:14:00.000Z | 2019-08-02T14:37:25.000Z | ars-reparameterization/sparse gamma def/gamma_def_score.py | astirn/neural-inverse-cdf-sampling | 80eb2eb7cf396a4e53df62bc126e9a1828f55ca9 | [
"MIT"
] | 10 | 2017-03-05T13:31:01.000Z | 2020-03-29T01:09:01.000Z | from autograd import grad
import autograd.numpy as np
import autograd.numpy.random as npr
import autograd.scipy.special as sp
from gamma_def import *
# Define helper functions for score fnc estimator
def logQ(sample, alpha, m):
"""
Evaluates log of variational approximation, vectorized.
"""
temp = alpha*(np.log(alpha)-np.log(m))
temp += (alpha-1.)*np.log(sample)
temp -= alpha*sample/m
temp -= np.log(sp.gamma(alpha))
return temp
def grad_logQ(sample,alpha,m):
"""
Evaluates the gradient of the log of variational approximation, vectorized.
"""
gradient = np.zeros((alpha.shape[0],2))
gradient[:,0] = np.log(alpha) - np.log(m) + 1. + np.log(sample) - sample/m
gradient[:,0] -= sp.digamma(alpha)
gradient[:,1] = -alpha/m + alpha*sample/m**2
return gradient
# Define score function estimator
def score_estimator(alpha,m,x,K,alphaz,S=100):
"""
Form score function estimator based on samples lmbda.
"""
N = x.shape[0]
if x.ndim == 1:
D = 1
else:
D = x.shape[1]
num_z = N*np.sum(K)
L = K.shape[0]
gradient = np.zeros((alpha.shape[0],2))
f = np.zeros((2*S,alpha.shape[0],2))
h = np.zeros((2*S,alpha.shape[0],2))
for s in range(2*S):
lmbda = npr.gamma(alpha,1.)
lmbda[lmbda < 1e-300] = 1e-300
zw = m*lmbda/alpha
lQ = logQ(zw,alpha,m)
gradLQ = grad_logQ(zw,alpha,m)
lP = logp(zw, K, x, alphaz)
temp = lP - np.sum(lQ)
f[s,:,:] = temp*gradLQ
h[s,:,:] = gradLQ
# CV
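    # Control variate: h = grad log q has zero expectation under q, so
    # subtracting a*h keeps the estimator unbiased; a is estimated from the
    # second half of the samples to (approximately) minimize the variance.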
covFH = np.zeros((alpha.shape[0],2))
covFH[:,0] = np.diagonal(np.cov(f[S:,:,0],h[S:,:,0],rowvar=False)[:alpha.shape[0],alpha.shape[0]:])
covFH[:,1] = np.diagonal(np.cov(f[S:,:,1],h[S:,:,1],rowvar=False)[:alpha.shape[0],alpha.shape[0]:])
a = covFH / np.var(h[S:,:,:],axis=0)
return np.mean(f[:S,:,:],axis=0) - a*np.mean(h[:S,:,:],axis=0)
| 29.727273 | 104 | 0.574414 | 3.453125 |
3590c07c413ceb5ce35b81b4a4ffbcc37aa29cf1 | 2,197 | sql | SQL | master2021/company_secProc.sql | MoDELSVGU/SQLSI | 54e8039cad17f69bb9c20847db69c7ac22875405 | [
"MIT"
] | null | null | null | master2021/company_secProc.sql | MoDELSVGU/SQLSI | 54e8039cad17f69bb9c20847db69c7ac22875405 | [
"MIT"
] | null | null | null | master2021/company_secProc.sql | MoDELSVGU/SQLSI | 54e8039cad17f69bb9c20847db69c7ac22875405 | [
"MIT"
] | null | null | null | DROP PROCEDURE IF EXISTS Query1;
/* SELECT email FROM Employee */
DELIMITER //
CREATE PROCEDURE Query1(in kcaller varchar(250), in krole varchar(250))
BEGIN
DECLARE _rollback int DEFAULT 0;
DECLARE EXIT HANDLER FOR SQLEXCEPTION
BEGIN
SET _rollback = 1;
GET STACKED DIAGNOSTICS CONDITION 1 @p1 = RETURNED_SQLSTATE, @p2 = MESSAGE_TEXT;
SELECT @p1, @p2;
ROLLBACK;
END;
START TRANSACTION;
DROP TEMPORARY TABLE IF EXISTS TEMP1;
CREATE TEMPORARY TABLE TEMP1 AS (
SELECT CASE auth_READ_Employee_email(kcaller, krole, Employee_id) WHEN TRUE THEN email ELSE throw_error() END AS email FROM Employee
);
IF _rollback = 0
THEN SELECT * from TEMP1;
END IF;
END //
DELIMITER ;
DROP PROCEDURE IF EXISTS Query2;
/* SELECT salary FROM Employee JOIN (SELECT * FROM Supervision WHERE supervisors = 'B') ON supervisees = Employeee */
DELIMITER //
CREATE PROCEDURE Query2(in kcaller varchar(250), in krole varchar(250))
BEGIN
DECLARE _rollback int DEFAULT 0;
DECLARE EXIT HANDLER FOR SQLEXCEPTION
BEGIN
SET _rollback = 1;
GET STACKED DIAGNOSTICS CONDITION 1 @p1 = RETURNED_SQLSTATE, @p2 = MESSAGE_TEXT;
SELECT @p1, @p2;
ROLLBACK;
END;
START TRANSACTION;
DROP TEMPORARY TABLE IF EXISTS TEMP1;
CREATE TEMPORARY TABLE TEMP1 AS (
SELECT Employee_id AS supervisees, Employee_id AS supervisors FROM Employee, Employee WHERE Employee_id = 'B'
);
DROP TEMPORARY TABLE IF EXISTS TEMP2;
CREATE TEMPORARY TABLE TEMP2 AS (
SELECT CASE auth_READ_Supervision(kcaller, krole, supervisees, supervisors) WHEN TRUE THEN supervisees ELSE throw_error() END AS supervisees FROM TEMP1
);
DROP TEMPORARY TABLE IF EXISTS TEMP3;
CREATE TEMPORARY TABLE TEMP3 AS (
SELECT * FROM Supervision WHERE supervisors = 'B'
);
DROP TEMPORARY TABLE IF EXISTS TEMP4;
CREATE TEMPORARY TABLE TEMP4 AS (
SELECT * FROM Employee JOIN TEMP3 ON supervisees = Employee_id
);
DROP TEMPORARY TABLE IF EXISTS TEMP5;
CREATE TEMPORARY TABLE TEMP5 AS (
SELECT * FROM TEMP4
);
DROP TEMPORARY TABLE IF EXISTS TEMP6;
CREATE TEMPORARY TABLE TEMP6 AS (
SELECT CASE auth_READ_Employee_salary(kcaller, krole, Employee_id) WHEN TRUE THEN salary ELSE throw_error() END AS salary FROM TEMP5
);
IF _rollback = 0
THEN SELECT * from TEMP6;
END IF;
END //
DELIMITER ;
| 31.84058 | 151 | 0.778334 | 3.203125 |
7fd3b2d2bf1b371d0081780ffac85d0eab4453e0 | 1,731 | go | Go | service/UserService.go | watter08/NotitiaGolang | cef8ba1c6d92e3700fafc17f9c3d062738c467ad | [
"MIT"
] | 5 | 2017-11-20T17:38:41.000Z | 2021-11-25T09:11:06.000Z | service/UserService.go | watter08/NotitiaGolang | cef8ba1c6d92e3700fafc17f9c3d062738c467ad | [
"MIT"
] | null | null | null | service/UserService.go | watter08/NotitiaGolang | cef8ba1c6d92e3700fafc17f9c3d062738c467ad | [
"MIT"
] | 5 | 2018-09-07T13:27:50.000Z | 2021-03-12T09:54:36.000Z | package service
import (
"go-restful-mvc/model"
)
func NewUser(user *model.User) (int, interface{}) {
// Your db call service implementation instead of resSuccess:=
resSuccess := model.BaseResponse{
Status: 200,
Message: "Success create new user, email: " + user.Email,
}
return 200, resSuccess
}
func FindUserByEmail(email string) (int, interface{}) {
// Your db call service implementation instead of user:=
user := model.UserDetailDtoResponse{
Email: email,
FullName: "mczal",
Role: "user",
}
result := model.BaseSingleResponse{
Status: 200,
Message: "Success",
Value: user,
}
return 200, result
}
func ScanUser() (int, interface{}) {
// Your db call service implementation instead of users:=
users := []model.UserSimpleDtoResponse{
{
Email: "[email protected]",
FullName: "mczal1",
Role: "user",
},
{
Email: "[email protected]",
FullName: "mczal",
Role: "user",
},
{
Email: "[email protected]",
FullName: "mczal3",
Role: "user",
},
}
userInts := make([]interface{}, len(users))
for i, v := range users {
userInts[i] = interface{}(v)
}
result := model.BaseListResponse{
Status: 200,
Message: "Success",
Content: userInts,
}
return 200, result
}
func FindUserByID(userID string) (int, interface{}) {
// Your db call service implementation instead of user:=
user := model.UserDetailDtoResponse{
Email: "[email protected]",
FullName: "mczal fullname",
Address: "Mczal Street 313",
Role: "user",
PhoneNumber: "1111111",
}
succRes := model.BaseSingleResponse{
Status: 200,
Message: "Success get user by id: " + userID,
Value: user,
}
return 200, succRes
}
| 20.127907 | 63 | 0.641248 | 3.15625 |
9c68062a58c52bec86735b0eb7c560dfe7a5284a | 2,096 | js | JavaScript | waltz-ng/client/common/svelte/calendar-heatmap/calendar-heatmap-utils.js | G-Research/waltz | 512049af0fd117af68f16bf6dd10c12207e8623f | [
"Apache-2.0"
] | 77 | 2016-06-17T11:01:16.000Z | 2020-02-28T04:00:31.000Z | waltz-ng/client/common/svelte/calendar-heatmap/calendar-heatmap-utils.js | khartec/waltz | fdfa6f386b70f2308b9abeecf804be350707b398 | [
"Apache-2.0"
] | 2,778 | 2016-01-21T20:44:52.000Z | 2020-03-09T13:27:07.000Z | waltz-ng/client/common/svelte/calendar-heatmap/calendar-heatmap-utils.js | G-Research/waltz | 512049af0fd117af68f16bf6dd10c12207e8623f | [
"Apache-2.0"
] | 42 | 2016-01-21T21:54:58.000Z | 2020-03-05T21:06:46.000Z | import _ from "lodash";
export function prepareMonthData(data = [], startDate, endDate) {
let months = [];
const startMonth = startDate.getMonth();
const startYear = startDate.getFullYear();
let initialCalendarDate = new Date(startYear, startMonth, 1);
while (initialCalendarDate < endDate) {
const date = new Date(initialCalendarDate);
const month = {
startDate: date,
days: mkDaysForMonth(data, date)
}
months.push(month);
initialCalendarDate = new Date(initialCalendarDate.setMonth(initialCalendarDate.getMonth() + 1))
}
return months
}
function mkDateKeyFromDateStr(dateStr) {
const date = new Date(dateStr);
return mkDateKeyFromComponents(date.getMonth() + 1, date.getDate(), date.getFullYear());
}
function mkDateKeyFromComponents(month, day, year) {
return year * 10000 + month * 100 + day;
}
function toDateFromDateKey(dateKey) {
let year = Math.floor(dateKey / 10000);
let month = Math.floor(dateKey % 10000 / 100);
let date = Math.floor(dateKey % 100);
return new Date(year, month - 1, date);
}
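// Worked example (illustrative, not part of the original file):
// mkDateKeyFromComponents(3, 5, 2021) -> 2021*10000 + 3*100 + 5 = 20210305, and
// toDateFromDateKey(20210305) -> new Date(2021, 2, 5) (JS Date months are 0-based).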
function mkDaysForMonth(data, date) {
let month = date.getMonth() + 1;
let year = date.getFullYear();
let dayCount = daysInMonth(month, year);
let dataByDate = _.keyBy(data, d => mkDateKeyFromDateStr(d.date));
return _.map(_.range(dayCount), x => {
let day = x + 1;
let dateKey = mkDateKeyFromComponents(month, day, year);
let value = _.get(dataByDate, [dateKey, "count"], 0);
return {date: toDateFromDateKey(dateKey), value}
});
}
export function daysInMonth(month, year) {
return new Date(year, month, 0).getDate();
}
export const monthNames = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"];
export const dimensions = {
diagram: {
width: 2400,
height: 800
},
day: {
width: 20
},
month: {
width: 150,
height: 160
},
circleRadius: 8,
weekPadding: 10,
monthsPerLine: 6
} | 23.288889 | 111 | 0.619275 | 3.125 |
168b54e73d5a4d57fae827d58cdc591a06aa87c1 | 1,455 | h | C | include/FrameBuffer.h | danielabbott/Painting-Application-prototype | 96de14a184f8649b609d618c6b24ea6b0c580c68 | [
"MIT"
] | null | null | null | include/FrameBuffer.h | danielabbott/Painting-Application-prototype | 96de14a184f8649b609d618c6b24ea6b0c580c68 | [
"MIT"
] | 1 | 2018-11-04T08:12:43.000Z | 2019-02-10T14:18:23.000Z | include/FrameBuffer.h | danielabbott/Painting-Application-prototype | 96de14a184f8649b609d618c6b24ea6b0c580c68 | [
"MIT"
] | null | null | null | #pragma once
#include <ArrayTexture.h>
// Use glBindFragDataLocation to bind fragment shader outputs to a colour attachment
class ArrayTextureFrameBuffer
{
GLuint frameBufferName = 0;
unsigned int size = 0;
ArrayTexture * arrayTexture;
public:
ArrayTextureFrameBuffer(ArrayTexture & arrayTexture, unsigned int arrayTextureIndex);
ArrayTextureFrameBuffer(ArrayTextureFrameBuffer const&) = delete;
ArrayTextureFrameBuffer(ArrayTextureFrameBuffer &&) = delete;
ArrayTextureFrameBuffer& operator=(const ArrayTextureFrameBuffer&&) = delete;
~ArrayTextureFrameBuffer();
// For drawing on the framebuffer
void bindFrameBuffer() const;
};
class FrameBuffer
{
GLuint frameBufferName = 0;
GLuint backingTextureId;
ImageFormat type;
unsigned int width = 0;
unsigned int height = 0;
void create();
public:
FrameBuffer(ImageFormat type, unsigned int width, unsigned int height, bool createWhenNeeded = false);
FrameBuffer(FrameBuffer const&) = delete;
FrameBuffer(FrameBuffer &&) = default;
FrameBuffer& operator=(const FrameBuffer&&) = delete;
~FrameBuffer();
// uses GL_ARB_clear_texture if available, otherwise will bind the framebuffer
void clear();
// For drawing on the framebuffer
void bindFrameBuffer();
// For using this framebuffer as a texture to draw with
void bindTexture();
// The texture MUST be bound when this function is called
void getTexureData(void * outputBuffer);
};
void bind_default_framebuffer();
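// Illustrative usage sketch (not part of the original header); assumes a current GL
// context, and the ImageFormat value named below is a placeholder from ArrayTexture.h:
//
//   FrameBuffer fb(someRgbaImageFormat, 512, 512);
//   fb.bindFrameBuffer();          // issue draw calls into the FBO here
//   bind_default_framebuffer();
//   fb.bindTexture();              // sample the result in a later pass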
| 25.982143 | 103 | 0.77457 | 3.03125 |
59452a77e1114367768125aa4e3d4d08a8301a28 | 4,957 | lua | Lua | projects/tutorials/tutscripts/T009-udp_network.lua | pixeljetstream/luxinia1 | 5d69b2d47d5ed4501dc155cfef999475f2fdfe2a | [
"Unlicense",
"MIT"
] | 31 | 2015-01-05T18:22:15.000Z | 2020-12-07T03:21:50.000Z | projects/tutorials/tutscripts/T009-udp_network.lua | pixeljetstream/luxinia1 | 5d69b2d47d5ed4501dc155cfef999475f2fdfe2a | [
"Unlicense",
"MIT"
] | null | null | null | projects/tutorials/tutscripts/T009-udp_network.lua | pixeljetstream/luxinia1 | 5d69b2d47d5ed4501dc155cfef999475f2fdfe2a | [
"Unlicense",
"MIT"
] | 12 | 2015-01-05T19:17:44.000Z | 2021-01-15T08:56:06.000Z | -- UDP Server/Client application
view = UtilFunctions.simplerenderqueue()
view.rClear:colorvalue(0.0,0.0,0.0,0)
dofile "T009/udp.lua"
mainframe = TitleFrame:new(0,0,180,80,nil,"Main Menu")
mainframe.startserver = mainframe:add(
Button:new(5,25,170,25,"Start server"))
mainframe.startclient = mainframe:add(
Button:new(5,50,170,25,"Start client"))
--------- logging component
function createLogLabel (...)
local label = Label:new(...)
label:setAlignment(
Label.LABEL_ALIGNLEFT,Label.LABEL_ALIGNTOP)
function label:reset() self.loglines = {} end
function label:log(fmt,...)
local line = select('#',...)==0 and tostring(fmt)
or fmt:format(...)
local str = self:wrapLine(line)
for nl,line in str:gmatch("(\n?)([^\n]*)") do
if #nl+#line>0 then
table.insert(self.loglines,line)
end
end
self:scrollto()
end
function label:scrollto(line)
line = line or #self.loglines
local lines = {}
for i=math.max(1,line-self:getMaxLines()+1),line do
lines[#lines+1] = self.loglines[i]
end
self:setText(table.concat(lines,"\n"))
end
label:reset()
return label
end
mainframe.serverlog = mainframe:add(
createLogLabel(10,28,160,0))
------- moving the frame
local function window_mousePressed(self,me,contains)
if not contains then return end
self:getParent():moveZ(self,1) -- move to front
if me.y<25 then
self:lockMouse()
self.mouselockpos = {me.x,me.y}
end
end
function window_mouseReleased(self,me)
if self:isMouseLocker() then self:unlockMouse() end
end
function window_mouseMoved(self,me)
if self:isMouseLocker() then
local x,y = self:getLocation()
local dx,dy =
me.x-self.mouselockpos[1],
me.y-self.mouselockpos[2]
self:setLocation(x+dx,y+dy)
end
end
mainframe.mousePressed = window_mousePressed
mainframe.mouseReleased = window_mouseReleased
mainframe.mouseMoved = window_mouseMoved
-------- running the server
function startserver ()
mainframe.serverruns = true
local function logger(...)
mainframe.serverlog:log(...)
end
local function closed ()
return not mainframe.serverruns
end
local function broadcast ()
end
server (logger,closed,broadcast)
Timer.remove("Server")
mainframe.serverruns = nil
end
------- start / stop the server
function mainframe.startserver:onClicked()
if mainframe.serverruns==nil then
mainframe:setSize(180,300)
mainframe.startserver:setText("Stop Server")
mainframe.startserver:setLocation(5,245)
mainframe.startclient:setLocation(5,270)
mainframe.serverlog:setSize(160,210)
mainframe.serverlog:scrollto()
Timer.set("Server",startserver,50)
else
mainframe:setSize(180,80)
mainframe.serverruns = false
mainframe.startserver:setText("Start Server")
mainframe.startserver:setLocation(5,25)
mainframe.startclient:setLocation(5,50)
mainframe.serverlog:setSize(160,0)
end
end
------- start a client
function mainframe.startclient:onClicked()
local window = TitleFrame:new(
mainframe:getX()+mainframe:getWidth(),mainframe:getY(),
200,150,nil,"Client")
Container.getRootContainer():add(window,1)
window.mousePressed = window_mousePressed
window.mouseReleased = window_mouseReleased
window.mouseMoved = window_mouseMoved
local close = window:add(Button:new(168,4,30,20,"exit"))
local running
local conpanel = window:add(GroupFrame:new(10,40,180,80))
conpanel:add(Label:new(10,10,160,16,"Server adress:"))
local serveradr = conpanel:add(TextField:new(10,26,160,20))
local connect = conpanel:add(Button:new(100,48,70,20,
"connect"))
serveradr:setText("localhost")
local chatpanel = GroupFrame:new(4,24,194,124)
local log = chatpanel:add(createLogLabel(5,5,170,90))
local sendtx = chatpanel:add(TextField:new(5,95,120,20))
local send = chatpanel:add(Button:new(122,95,40,20,"Send"))
local bye = chatpanel:add(Button:new(160,95,30,20,"Bye"))
local sendqueue = {}
function send:onClicked()
table.insert(sendqueue,sendtx:getText())
sendtx:setText("")
end
sendtx.onAction = send.onClicked
function bye:onClicked()
running = false
end
function close:onClicked()
running = false
window:remove()
end
function connect:onClicked()
if running~=nil then return end
running = true
conpanel:remove()
window:add(chatpanel)
local function closeit () return not running end
local function receiver (...) log:log(...) end
local function sender ()
return table.remove(sendqueue,1)
end
TimerTask.new(
function ()
client(serveradr:getText(),receiver,sender,closeit)
running = nil
chatpanel:remove()
window:add(conpanel)
end,50)
end
end
Container.getRootContainer():add(mainframe)
MouseCursor.showMouse(true)
-- cleanup for tutorial framework
return function() Timer.remove("Server") end | 26.089474 | 61 | 0.692556 | 3.453125 |
e3c933775877e1a29db56b69abdf7df09bfbc274 | 1,185 | go | Go | packetizer_test.go | lightsofapollo/rtp | cfeee237106ea293979796c30a3a1e1fbafaa355 | [
"MIT"
] | null | null | null | packetizer_test.go | lightsofapollo/rtp | cfeee237106ea293979796c30a3a1e1fbafaa355 | [
"MIT"
] | null | null | null | packetizer_test.go | lightsofapollo/rtp | cfeee237106ea293979796c30a3a1e1fbafaa355 | [
"MIT"
] | null | null | null | package rtp
import (
"fmt"
"testing"
"time"
"github.com/pion/rtp/codecs"
"github.com/stretchr/testify/assert"
)
func TestNtpConversion(t *testing.T) {
loc := time.FixedZone("UTC-5", -5*60*60)
tests := []struct {
t time.Time
n uint64
}{
{t: time.Date(1985, time.June, 23, 4, 0, 0, 0, loc), n: 0xa0c65b1000000000},
{t: time.Date(1999, time.December, 31, 23, 59, 59, 500000, loc), n: 0xbc18084f0020c49b},
{t: time.Date(2019, time.March, 27, 13, 39, 30, 8675309, loc), n: 0xe04641e202388b88},
}
for _, in := range tests {
out := toNtpTime(in.t)
assert.Equal(t, in.n, out)
}
}
func TestPacketizer(t *testing.T) {
multiplepayload := make([]byte, 128)
//use the G722 payloader here, because it's very simple and all 0s is valid G722 data.
packetizer := NewPacketizer(100, 98, 0x1234ABCD, &codecs.G722Payloader{}, NewRandomSequencer(), 90000)
packets := packetizer.Packetize(multiplepayload, 2000)
if len(packets) != 2 {
packetlengths := ""
for i := 0; i < len(packets); i++ {
packetlengths += fmt.Sprintf("Packet %d length %d\n", i, len(packets[i].Payload))
}
t.Fatalf("Generated %d packets instead of 2\n%s", len(packets), packetlengths)
}
}
| 26.931818 | 103 | 0.662447 | 3.03125 |
1b156cb4ea5bde552c19b3569a25db9f053abd1e | 689 | lua | Lua | src/ServerScriptService/temporary/Footsteps/Pitch.server.lua | DevArke/starstream | 79af1414f5140519951ed023883ad2144b36a198 | [
"Apache-2.0"
] | null | null | null | src/ServerScriptService/temporary/Footsteps/Pitch.server.lua | DevArke/starstream | 79af1414f5140519951ed023883ad2144b36a198 | [
"Apache-2.0"
] | null | null | null | src/ServerScriptService/temporary/Footsteps/Pitch.server.lua | DevArke/starstream | 79af1414f5140519951ed023883ad2144b36a198 | [
"Apache-2.0"
] | null | null | null | while wait(.3) do
x = script.Parent:GetChildren()
for i = 1,#x do
if x[i]:IsA("Sound") then
x[i].Pitch = x[i].Pitch - 0.1
end
end
wait(.3)
x = script.Parent:GetChildren()
for i = 1,#x do
if x[i]:IsA("Sound") then
x[i].Pitch = x[i].Pitch - 0.1
end
end
wait(.3)
x = script.Parent:GetChildren()
for i = 1,#x do
if x[i]:IsA("Sound") then
x[i].Pitch = x[i].Pitch + 0.2
end
end
wait(.3)
x = script.Parent:GetChildren()
for i = 1,#x do
if x[i]:IsA("Sound") then
x[i].Pitch = x[i].Pitch - 0.1
end
end
wait(.3)
x = script.Parent:GetChildren()
for i = 1,#x do
if x[i]:IsA("Sound") then
x[i].Pitch = x[i].Pitch + 0.1
end
end
end
| 14.978261 | 32 | 0.558781 | 3.015625 |
967516e82be9ecdb82149ef6fb8f5a617ce39ec4 | 1,738 | kt | Kotlin | retroauth-android/src/main/java/com/andretietz/retroauth/WeakActivityStack.kt | moovel/retroauth | b5334f354c5d04f8f3d39af72cbd6fb79aaa10c3 | [
"Apache-2.0"
] | 279 | 2015-06-29T08:08:16.000Z | 2015-09-24T04:02:06.000Z | retroauth-android/src/main/java/com/andretietz/retroauth/WeakActivityStack.kt | moovel/retroauth | b5334f354c5d04f8f3d39af72cbd6fb79aaa10c3 | [
"Apache-2.0"
] | 51 | 2015-10-24T21:50:13.000Z | 2020-12-22T19:52:33.000Z | retroauth-android/src/main/java/com/andretietz/retroauth/WeakActivityStack.kt | moovel/retroauth | b5334f354c5d04f8f3d39af72cbd6fb79aaa10c3 | [
"Apache-2.0"
] | 34 | 2015-10-06T17:34:48.000Z | 2021-03-19T15:50:14.000Z | /*
* Copyright (c) 2016 Andre Tietz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.andretietz.retroauth
import android.app.Activity
import android.util.SparseArray
import java.lang.ref.WeakReference
import java.util.LinkedList
internal class WeakActivityStack {
private val map = SparseArray<WeakReference<Activity>>()
private val stack = LinkedList<Int>()
fun push(item: Activity) {
val identifier = getIdentifier(item)
synchronized(this) {
stack.push(identifier)
map.put(identifier, WeakReference(item))
}
}
fun pop(): Activity? {
synchronized(this) {
if (!stack.isEmpty()) {
val identifier = stack.removeFirst()
val item = map.get(requireNotNull(identifier)).get()
map.remove(identifier)
return item
}
return null
}
}
fun remove(item: Activity) {
val identifier = getIdentifier(item)
synchronized(this) {
stack.remove(identifier)
map.remove(identifier)
}
}
fun peek(): Activity? {
synchronized(this) {
if (!stack.isEmpty()) {
return map.get(stack.first).get()
}
}
return null
}
private fun getIdentifier(item: Activity) = item.hashCode()
}
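// Illustrative wiring (not part of the original file): this stack is typically driven from
// Application.ActivityLifecycleCallbacks: push(activity) in onActivityCreated(),
// remove(activity) in onActivityDestroyed(), and peek()/pop() when a foreground Activity
// is needed (e.g. to show a login screen).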
| 24.828571 | 75 | 0.679517 | 3.03125 |
ae0d3d1794bfbcd39384438b73c4f828a5dd067e | 1,629 | sql | SQL | sql/create_indiv_seg_support.sql | crashka/fecdb | b5c35ae782c24fc6a64c989e0107074d1684879f | [
"MIT"
] | null | null | null | sql/create_indiv_seg_support.sql | crashka/fecdb | b5c35ae782c24fc6a64c989e0107074d1684879f | [
"MIT"
] | null | null | null | sql/create_indiv_seg_support.sql | crashka/fecdb | b5c35ae782c24fc6a64c989e0107074d1684879f | [
"MIT"
] | null | null | null | --
-- Individual Segment
--
CREATE TABLE IF NOT EXISTS indiv_seg (
id BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY,
name TEXT NOT NULL,
description TEXT
)
WITH (FILLFACTOR=70);
--
-- Individual Segment Members (composed of `indiv` records)
--
CREATE TABLE IF NOT EXISTS indiv_seg_memb (
id BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY,
indiv_seg_id BIGINT NOT NULL,
indiv_id BIGINT NOT NULL
)
WITH (FILLFACTOR=70);
ALTER TABLE indiv_seg_memb ADD FOREIGN KEY (indiv_seg_id) REFERENCES indiv_seg (id) ON DELETE CASCADE;
ALTER TABLE indiv_seg_memb ADD FOREIGN KEY (indiv_id) REFERENCES indiv (id);
CREATE UNIQUE INDEX indiv_seg_name ON indiv_seg (name);
CREATE UNIQUE INDEX indiv_seg_memb_user_key ON indiv_seg_memb (indiv_seg_id, indiv_id);
CREATE INDEX indiv_seg_memb_indiv_id ON indiv_seg_memb (indiv_id);
CREATE OR REPLACE FUNCTION create_indiv_seg(indiv_ids BIGINT[], seg_name text, seg_desc text = null)
RETURNS BIGINT AS $$
DECLARE
indiv_tbl TEXT = 'indiv';
seg_id BIGINT;
indiv_id BIGINT;
BEGIN
EXECUTE 'insert into indiv_seg (name, description)
values ($1, $2)
on conflict do nothing
returning id'
INTO seg_id
USING seg_name, seg_desc;
FOREACH indiv_id IN ARRAY indiv_ids
LOOP
EXECUTE
'insert into indiv_seg_memb(indiv_seg_id, indiv_id)
values ($1, $2)
on conflict do nothing'
USING seg_id, indiv_id;
END LOOP;
RETURN seg_id;
END;
$$ LANGUAGE plpgsql;
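-- Illustrative usage (not part of the original script); the ids, name and description
-- below are placeholders:
-- SELECT create_indiv_seg(ARRAY[101, 102, 103]::BIGINT[], 'demo_segment', 'ad-hoc example segment');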
| 30.735849 | 102 | 0.668508 | 3.015625 |
503e5c546d7eb8c9659d6bad2509bf26a67cef4c | 1,454 | lua | Lua | examples/game_server.lua | samuelwbaird/brogue | 2eb185698f1fe94cc3ed6f72360c29203342fe8b | [
"MIT"
] | 1 | 2020-06-17T06:11:37.000Z | 2020-06-17T06:11:37.000Z | examples/game_server.lua | samuelwbaird/brogue | 2eb185698f1fe94cc3ed6f72360c29203342fe8b | [
"MIT"
] | null | null | null | examples/game_server.lua | samuelwbaird/brogue | 2eb185698f1fe94cc3ed6f72360c29203342fe8b | [
"MIT"
] | null | null | null | -- demonstration game server
-- copyright 2014 Samuel Baird MIT Licence
-- reference the brogue libraries
package.path = '../source/?.lua;' .. package.path
-- demonstrate a game server with the following features
-- sessions using cookies (no real security)
-- long polling status updates
-- rascal proxies used to run game logic in its own thread
-- use rascal
local rascal = require('rascal.core')
-- configure logging
-- rascal.log_service:log_to_file('log/game_server.log')
rascal.log_service:log_to_console(true)
-- standard rascal session db
rascal.service('rascal.session.session_server', { 'db/session.sqlite' })
-- we are going to use the game of blockers and runners
-- as demonstrated in the ORM example
-- the game will run in its own microserver process
-- launch this class as a micro server, with these parameters
rascal.service('classes.game_thread', { 'db/game.sqlite' })
-- configure an HTTP server
rascal.http_server('tcp://*:8080', 1, [[
prefix('/', {
-- access static files under resources prefix
prefix('resources/', {
static('static/', nil, false),
}),
-- otherwise chain in our custom handler
chain('classes.game_session', {}, {
prefix('api_', {
handler('classes.game_api', {}),
}),
handler('classes.game_view', {}),
}),
redirect('/')
})
]])
log('open your browser at http://localhost:8080/')
log('ctrl-c to exit')
-- last thing to do is run the main event loop
rascal.run_loop()
| 25.964286 | 72 | 0.700825 | 3.125 |
545fa3602157682276f1fc088fdd2cb3a9d2e535 | 5,097 | go | Go | model/mock/interfaces.go | developertask/multiwallet | cbc739f642604647d6698a4a5cb7621dc7f66afa | [
"MIT"
] | null | null | null | model/mock/interfaces.go | developertask/multiwallet | cbc739f642604647d6698a4a5cb7621dc7f66afa | [
"MIT"
] | null | null | null | model/mock/interfaces.go | developertask/multiwallet | cbc739f642604647d6698a4a5cb7621dc7f66afa | [
"MIT"
] | null | null | null | package mock
import (
"encoding/hex"
"errors"
"fmt"
"sync"
gosocketio "github.com/developertask/golang-socketio"
"github.com/developertask/multiwallet/client"
"github.com/developertask/multiwallet/model"
"github.com/btcsuite/btcutil"
)
type MockAPIClient struct {
blockChan chan model.Block
txChan chan model.Transaction
listeningAddrs []btcutil.Address
chainTip int
feePerBlock int
info *model.Info
addrToScript func(btcutil.Address) ([]byte, error)
}
func NewMockApiClient(addrToScript func(btcutil.Address) ([]byte, error)) model.APIClient {
return &MockAPIClient{
blockChan: make(chan model.Block),
txChan: make(chan model.Transaction),
chainTip: 0,
addrToScript: addrToScript,
feePerBlock: 1,
info: &MockInfo,
}
}
func (m *MockAPIClient) Start() error {
return nil
}
func (m *MockAPIClient) GetInfo() (*model.Info, error) {
return m.info, nil
}
func (m *MockAPIClient) GetTransaction(txid string) (*model.Transaction, error) {
for _, tx := range MockTransactions {
if tx.Txid == txid {
return &tx, nil
}
}
return nil, errors.New("Not found")
}
func (m *MockAPIClient) GetRawTransaction(txid string) ([]byte, error) {
if raw, ok := MockRawTransactions[txid]; ok {
return raw, nil
}
return nil, errors.New("Not found")
}
func (m *MockAPIClient) GetTransactions(addrs []btcutil.Address) ([]model.Transaction, error) {
txs := make([]model.Transaction, len(MockTransactions))
copy(txs, MockTransactions)
txs[0].Outputs[1].ScriptPubKey.Addresses = []string{addrs[0].String()}
txs[1].Inputs[0].Addr = addrs[0].String()
txs[1].Outputs[1].ScriptPubKey.Addresses = []string{addrs[1].String()}
txs[2].Outputs[1].ScriptPubKey.Addresses = []string{addrs[2].String()}
return txs, nil
}
func (m *MockAPIClient) GetUtxos(addrs []btcutil.Address) ([]model.Utxo, error) {
utxos := make([]model.Utxo, len(MockUtxos))
copy(utxos, MockUtxos)
utxos[0].Address = addrs[1].String()
script, _ := m.addrToScript(addrs[1])
utxos[0].ScriptPubKey = hex.EncodeToString(script)
utxos[1].Address = addrs[2].String()
script, _ = m.addrToScript(addrs[2])
utxos[1].ScriptPubKey = hex.EncodeToString(script)
return utxos, nil
}
func (m *MockAPIClient) BlockNotify() <-chan model.Block {
return m.blockChan
}
func (m *MockAPIClient) TransactionNotify() <-chan model.Transaction {
return m.txChan
}
func (m *MockAPIClient) ListenAddresses(addrs ...btcutil.Address) {
m.listeningAddrs = append(m.listeningAddrs, addrs...)
}
func (m *MockAPIClient) Broadcast(tx []byte) (string, error) {
return "a8c685478265f4c14dada651969c45a65e1aeb8cd6791f2f5bb6a1d9952104d9", nil
}
func (m *MockAPIClient) GetBestBlock() (*model.Block, error) {
return &MockBlocks[m.chainTip], nil
}
func (m *MockAPIClient) EstimateFee(nBlocks int) (int, error) {
return m.feePerBlock * nBlocks, nil
}
func (m *MockAPIClient) Close() {}
func MockWebsocketClientOnClientPool(p *client.ClientPool) *MockSocketClient {
var (
callbacksMap = make(map[string]func(*gosocketio.Channel, interface{}))
mockSocketClient = &MockSocketClient{
callbacks: callbacksMap,
listeningAddresses: []string{},
}
)
for _, c := range p.Clients() {
c.SocketClient = mockSocketClient
}
return mockSocketClient
}
func NewMockWebsocketClient() *MockSocketClient {
var (
callbacksMap = make(map[string]func(*gosocketio.Channel, interface{}))
mockSocketClient = &MockSocketClient{
callbacks: callbacksMap,
listeningAddresses: []string{},
}
)
return mockSocketClient
}
type MockSocketClient struct {
callbackMutex sync.Mutex
callbacks map[string]func(*gosocketio.Channel, interface{})
listeningAddresses []string
}
func (m *MockSocketClient) SendCallback(method string, args ...interface{}) {
if gosocketChan, ok := args[0].(*gosocketio.Channel); ok {
m.callbacks[method](gosocketChan, args[1])
} else {
m.callbacks[method](nil, args[1])
}
}
func (m *MockSocketClient) IsListeningForAddress(addr string) bool {
for _, a := range m.listeningAddresses {
if a == addr {
return true
}
}
return false
}
func (m *MockSocketClient) On(method string, callback interface{}) error {
c, ok := callback.(func(h *gosocketio.Channel, args interface{}))
if !ok {
return fmt.Errorf("failed casting mock callback: %+v", callback)
}
m.callbackMutex.Lock()
defer m.callbackMutex.Unlock()
if method == "bitcoind/addresstxid" {
m.callbacks[method] = c
} else if method == "bitcoind/hashblock" {
m.callbacks[method] = c
}
return nil
}
func (m *MockSocketClient) Emit(method string, args []interface{}) error {
if method == "subscribe" {
subscribeTo, ok := args[0].(string)
if !ok || subscribeTo != "bitcoind/addresstxid" {
return fmt.Errorf("first emit arg is not bitcoind/addresstxid, was: %+v", args[0])
}
addrs, ok := args[1].([]string)
if !ok {
return fmt.Errorf("second emit arg is not address value, was %+v", args[1])
}
m.listeningAddresses = append(m.listeningAddresses, addrs...)
}
return nil
}
func (m *MockSocketClient) Close() {}
| 26.968254 | 95 | 0.701982 | 3 |
103cd0ece26ced39ee383132870ac9dfb2e6d259 | 1,492 | sql | SQL | amps-maven-plugin/src/main/resources/com/atlassian/maven/plugins/amps/product/jira/oracle12c-template.sql | acidburn0zzz/atlassian-amps | c69bd0a64173325d3a0304362ee8537ee32a30b7 | [
"Apache-2.0"
] | null | null | null | amps-maven-plugin/src/main/resources/com/atlassian/maven/plugins/amps/product/jira/oracle12c-template.sql | acidburn0zzz/atlassian-amps | c69bd0a64173325d3a0304362ee8537ee32a30b7 | [
"Apache-2.0"
] | 2 | 2021-02-03T19:35:38.000Z | 2021-08-02T17:04:27.000Z | amps-maven-plugin/src/main/resources/com/atlassian/maven/plugins/amps/product/jira/oracle12c-template.sql | Acidburn0zzz/atlassian-amps | c69bd0a64173325d3a0304362ee8537ee32a30b7 | [
"Apache-2.0"
] | null | null | null | -- This script must be run as a user with the "SYSDBA" role
DECLARE
v_count INTEGER := 0;
v_sid VARCHAR2(20);
BEGIN
SELECT SYS_CONTEXT('userenv','instance_name') INTO v_sid FROM DUAL;
-- Ensure we're in the root container
EXECUTE IMMEDIATE 'ALTER SESSION SET CONTAINER=CDB$ROOT';
-- Configure the Data Pump directory
EXECUTE IMMEDIATE q'{CREATE OR REPLACE DIRECTORY DATA_PUMP_DIR AS 'v_data_pump_dir'}';
-- Does the JIRA pluggable DB exist?
SELECT COUNT (1) INTO v_count FROM cdb_pdbs WHERE pdb_name = 'JIRA_PDB';
IF v_count > 0
THEN
-- Yes, close and drop it
EXECUTE IMMEDIATE 'ALTER PLUGGABLE DATABASE JIRA_PDB CLOSE';
EXECUTE IMMEDIATE 'DROP PLUGGABLE DATABASE JIRA_PDB INCLUDING DATAFILES';
END IF;
-- [Re]create the JIRA pluggable DB, switch to it, and open it
EXECUTE IMMEDIATE 'CREATE PLUGGABLE DATABASE JIRA_PDB ' ||
'ADMIN USER jira_dba IDENTIFIED BY jira_dba ' ||
'FILE_NAME_CONVERT = (''/u01/app/oracle/oradata/' || v_sid || '/pdbseed/'',''/u01/app/oracle/oradata/' || v_sid || '/JIRA_PDB/'')';
EXECUTE IMMEDIATE 'ALTER SESSION SET CONTAINER=JIRA_PDB';
EXECUTE IMMEDIATE 'ALTER PLUGGABLE DATABASE OPEN';
-- Create the JIRA user/schema in the JIRA DB
EXECUTE IMMEDIATE 'CREATE USER v_jira_user IDENTIFIED BY v_jira_pwd';
EXECUTE IMMEDIATE 'GRANT CONNECT, RESOURCE, IMP_FULL_DATABASE TO v_jira_user';
EXECUTE IMMEDIATE 'GRANT READ, WRITE ON DIRECTORY DATA_PUMP_DIR TO v_jira_user';
END; | 43.882353 | 151 | 0.720509 | 3.125 |
e949c7d4a2f90769f80d4f95462eae913f888637 | 1,627 | kt | Kotlin | src/test/kotlin/pro/devil/ex/collections/LinkedListTests.kt | pro-devil-repos/collections-examples | 8c9b8adb98afb42389faeeccf60b826d18593eb0 | [
"Apache-2.0"
] | null | null | null | src/test/kotlin/pro/devil/ex/collections/LinkedListTests.kt | pro-devil-repos/collections-examples | 8c9b8adb98afb42389faeeccf60b826d18593eb0 | [
"Apache-2.0"
] | null | null | null | src/test/kotlin/pro/devil/ex/collections/LinkedListTests.kt | pro-devil-repos/collections-examples | 8c9b8adb98afb42389faeeccf60b826d18593eb0 | [
"Apache-2.0"
] | null | null | null | package pro.devil.ex.collections
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.TestInstance
import org.junit.jupiter.api.assertDoesNotThrow
import pro.devil.ex.collections.linkedList.LinkedList
import pro.devil.ex.collections.linkedList.linkedListOf
import pro.devil.ex.collections.linkedList.mutableLinkedListOf
import kotlin.test.assertEquals
import kotlin.test.assertFalse
import kotlin.test.assertNotNull
import kotlin.test.assertTrue
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
class LinkedListTests {
@Test
fun test_linked_list_empty_create() {
assertDoesNotThrow {
val l1 = LinkedList<Int>()
val l2 = linkedListOf<Double>()
assertTrue { l1.isEmpty() }
assertTrue { l2.isEmpty() }
}
}
@Test
fun test_linked_list_create_filled() {
assertDoesNotThrow {
val list = linkedListOf<Double>(1.23, 3.45, 0.674)
assertEquals(3, list.size)
}
}
@Test
fun test_linked_list_get_node_at() {
val list = linkedListOf<Int>(15, 345, 674)
assertNotNull(list.nodeAt(1))
assertEquals(345, list.nodeAt(1)?.value)
}
@Test
fun test_contains_element() {
val list = mutableLinkedListOf(15, 345, 674, 34, 674)
assertTrue { list.contains(674) }
assertFalse { list.contains(111) }
}
@Test
fun test_contains_all_elements() {
val list = mutableLinkedListOf(15, 345, 674, 34, 674)
assertTrue { list.containsAll(listOf(15, 34)) }
assertFalse { list.containsAll(listOf(15, 34, 12)) }
}
} | 27.116667 | 62 | 0.666872 | 3.0625 |
21ac000fb70ccae8e3253f3990a55f0b3d2efcff | 962 | rs | Rust | common/dnode-rest-client/src/ds.rs | villesundell/move-tools | 3af57b46f897ab8fb4430f00442fba7f47be440b | [
"MIT"
] | 10 | 2020-06-10T03:51:28.000Z | 2022-03-16T18:38:43.000Z | common/dnode-rest-client/src/ds.rs | villesundell/move-tools | 3af57b46f897ab8fb4430f00442fba7f47be440b | [
"MIT"
] | 8 | 2020-10-28T09:11:24.000Z | 2021-03-22T18:43:15.000Z | common/dnode-rest-client/src/ds.rs | villesundell/move-tools | 3af57b46f897ab8fb4430f00442fba7f47be440b | [
"MIT"
] | 10 | 2020-06-28T07:34:39.000Z | 2021-05-07T10:16:23.000Z | use serde::{Serialize, Deserialize};
/// Block number
pub type Block = u128;
/// Data Api response
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Response {
/// Current block number
#[serde(deserialize_with = "block::deserialize")]
pub height: Block,
#[serde(flatten)]
pub body: ResponseBody,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "lowercase")]
pub enum ResponseBody {
/// Success response
Result {
/// Hex encoded bytecode
value: String,
},
///Error response
Error {
#[serde(rename = "error")]
message: String,
},
}
mod block {
use super::Block;
use serde::{Deserialize, Deserializer};
pub fn deserialize<'de, D>(deserializer: D) -> Result<Block, D::Error>
where
D: Deserializer<'de>,
{
String::deserialize(deserializer)?
.parse()
.map_err(serde::de::Error::custom)
}
}
| 21.377778 | 74 | 0.601871 | 3.140625 |
e981e6e71fed00b795cc49b3b4f0dd2f3b55439c | 7,916 | rb | Ruby | test/support/base_test_model.rb | deivid-rodriguez/activerecord_where_assoc | 100318de80dea5f3c177526c3f824fda307ebc04 | [
"MIT"
] | 150 | 2017-09-26T13:39:58.000Z | 2022-02-28T07:20:51.000Z | test/support/base_test_model.rb | deivid-rodriguez/activerecord_where_assoc | 100318de80dea5f3c177526c3f824fda307ebc04 | [
"MIT"
] | 6 | 2017-07-29T20:33:08.000Z | 2021-02-23T19:47:48.000Z | test/support/base_test_model.rb | deivid-rodriguez/activerecord_where_assoc | 100318de80dea5f3c177526c3f824fda307ebc04 | [
"MIT"
] | 6 | 2018-02-21T19:34:28.000Z | 2022-02-15T20:40:37.000Z | # frozen_string_literal: true
require "prime"
class BaseTestModel < ActiveRecord::Base
self.abstract_class = true
# We give a distinct prime number to ever conditions we use as part of associations
# and default_scopes, and we record it, do that we can easily get a number that would match
# each of them by multiplying them.
# The conditions themselves use modulo, so at long as the value is a multiple, it all works.
@@condition_values_enumerator = Prime.each # rubocop:disable Style/ClassVars
# Hash of [Model.name, association_name] => value
# association_name can also be :default_scope, :custom_scope
@@model_associations_conditions = {} # rubocop:disable Style/ClassVars
def self.setup_test_default_scope
value = need_test_condition_value_for(:default_scope)
condition = testable_condition(value)
default_scope -> { where(condition) }
end
def self.test_condition_column
"#{table_name}_column"
end
delegate :test_condition_column, to: "self.class"
def self.adhoc_column_name
"#{table_name}_adhoc_column"
end
delegate :adhoc_column_name, to: "self.class"
def self.need_test_condition_value_for(association_name)
@@model_associations_conditions[[self.name, association_name.to_s]] ||= @@condition_values_enumerator.next
end
def self.test_condition_value_for(association_name)
@@model_associations_conditions[[self.name, association_name.to_s]]
end
def self.model_associations_conditions
@@model_associations_conditions
end
def self.testable_condition(value)
"#{table_name}.#{test_condition_column} % #{value} = 0"
end
# Creates an association with a condition on #{target_table_name}.#{target_table_name}_column
def self.testable_association(macro, association_name, given_scope = nil, **options)
if given_scope.is_a?(Hash)
options = given_scope
given_scope = nil
end
condition_value = need_test_condition_value_for(association_name)
if given_scope
scope = -> { where(testable_condition(condition_value)).instance_exec(&given_scope) }
else
scope = -> { where(testable_condition(condition_value)) }
end
send(macro, association_name, scope, **options)
end
def self.testable_has_many(association_name, given_scope = nil, **options)
raise "association_name should start with 'm'" unless association_name.to_s.start_with?("m")
testable_association(:has_many, association_name, given_scope, **options)
end
def self.testable_has_one(association_name, given_scope = nil, **options)
raise "association_name should start with 'o'" unless association_name.to_s.start_with?("o")
testable_association(:has_one, association_name, given_scope, **options)
end
def self.testable_belongs_to(association_name, given_scope = nil, **options)
raise "association_name should start with 'b'" unless association_name.to_s.start_with?("b")
testable_association(:belongs_to, association_name, given_scope, **options)
end
def self.testable_has_and_belongs_to_many(association_name, given_scope = nil, **options)
raise "association_name should start with 'z'" unless association_name.to_s.start_with?("z")
testable_association(:has_and_belongs_to_many, association_name, given_scope, **options)
end
def self.create_default!(*source_associations)
condition_value = TestHelpers.condition_value_result_for(*source_associations) || 1
condition_value *= need_test_condition_value_for(:default_scope)
create!(test_condition_column => condition_value)
end
def create_has_one!(association_name, attrs = {})
association_name = ActiveRecordWhereAssoc::ActiveRecordCompat.normalize_association_name(association_name)
reflection = self.class.reflections[association_name]
raise "Didn't find association: #{association_name}" unless reflection
target_model = reflection.klass
old_matched_ids = target_model.where(reflection.foreign_key => self.id).unscope(:offset, :limit).pluck(:id).to_a
record = send("create_#{association_name}!", attrs)
target_model.where(id: old_matched_ids).unscope(:offset, :limit).update_all(reflection.foreign_key => self.id)
record
end
# does a #create! and automatically fills the column with a value that matches the merge of the condition on
# the matching association of each passed source_associations
def create_assoc!(association_name, *source_associations)
options = source_associations.extract_options!
options = options.reverse_merge(allow_no_source: false, adhoc_value: nil, skip_default: false, use_bad_type: false)
raise "Must be a direct association, not #{association_name.inspect}" unless association_name =~ /^[mobz]p?l?\d+$/
raise "Need at least one source model or a nil instead" if !options[:allow_no_source] && source_associations.empty?
source_associations = source_associations.compact
association_name = ActiveRecordWhereAssoc::ActiveRecordCompat.normalize_association_name(association_name)
association_macro = association_name.to_s[/^[a-z]+/]
reflection = self.class.reflections[association_name]
raise "Didn't find association: #{association_name}" unless reflection
target_model = options[:target_model] || reflection.klass
if options[:skip_attributes]
attributes = {}
else
condition_value = target_model.test_condition_value_for(:default_scope) unless options[:skip_default]
if source_associations.present?
condition_value ||= 1
condition_value *= TestHelpers.condition_value_result_for(*source_associations)
end
attributes = { target_model.test_condition_column => condition_value,
target_model.adhoc_column_name => options[:adhoc_value],
}
end
case association_macro
when /mp?l?/, "z"
record = send(association_name).create!(attributes)
when /op?l?/
      # Creating a has_one like this removes the id of the previously existing records that were referring.
# We don't want that for the purpose of our tests
record = create_has_one!(association_name, attributes)
when "b"
record = send("create_#{association_name}!", attributes)
save! # Must save that our id that just changed
when "bp"
record = target_model.create(attributes)
update!(reflection.foreign_key => record.id, reflection.foreign_type => target_model.base_class.name)
else
raise "Unexpected macro: #{association_macro}"
end
if options[:use_bad_type]
case association_macro
when "mp", "op"
record.update(:"has_#{record.class.table_name}_poly_type" => "PolyBadRecord")
when "bp"
update(:"#{self.class.table_name}_belongs_to_poly_type" => "PolyBadRecord")
end
end
record
end
# Receives the same parameters as #create_assoc!, but creates a record for every
# combinations missing one of the source models and the default scope
def create_bad_assocs!(association_name, *source_associations, &block)
options = source_associations.extract_options!
source_models = source_associations.compact
assocs_options = []
wrong_combinations = source_associations.combination(source_associations.size - 1)
wrong_combinations.each do |wrong_combination|
assocs_options << [association_name, *wrong_combination, allow_no_source: true]
end
assocs_options << [association_name, *source_models, allow_no_source: true, skip_default: true]
assocs_options << [association_name, *source_models, allow_no_source: true, use_bad_type: true] if association_name =~ /^.p\d/
records = []
assocs_options.each do |assoc_options|
records << create_assoc!(*assoc_options[0...-1], options.merge(assoc_options[-1]))
if block
yield records.last
records.last.destroy
end
end
if block
nil
else
records
end
end
end
| 38.995074 | 130 | 0.743305 | 3.03125 |
330597e751b125d41e61a3b1d6607b8ceee7379c | 21,902 | py | Python | annotator_web.py | j20100/Seg_Annotator | 49b2806be9450c901cf4977633a4ec29b3b6bdca | [
"CC-BY-4.0"
] | null | null | null | annotator_web.py | j20100/Seg_Annotator | 49b2806be9450c901cf4977633a4ec29b3b6bdca | [
"CC-BY-4.0"
] | null | null | null | annotator_web.py | j20100/Seg_Annotator | 49b2806be9450c901cf4977633a4ec29b3b6bdca | [
"CC-BY-4.0"
] | null | null | null | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import argparse
import base64
from bson import ObjectId
import datetime
from flask import Flask, Markup, Response, abort, escape, flash, redirect, \
    render_template, request, url_for, current_app, make_response
from flask_login import LoginManager, UserMixin, current_user, login_required, \
login_user, logout_user
from werkzeug.utils import secure_filename
from functools import wraps, update_wrapper
from gridfs import GridFS
from jinja2 import evalcontextfilter
from binascii import a2b_base64
from OpenSSL import SSL
from flask import session
from flask_socketio import SocketIO, emit
import json
import hashlib
import pandas as pd
import pymongo
import re
import subprocess
import threading
import time
import uuid
import urllib.parse
import webcolors
import os
import time
import glob
from flask_cors import CORS
curr_annotated_img = []
def hash_password(password):
"""This function hashes the password with SHA256 and a random salt"""
salt = uuid.uuid4().hex
return hashlib.sha256(salt.encode() + password.encode()).hexdigest() + ':' + salt
def check_password(hashed_password, user_password):
"""This function checks a password against a SHA256:salt entry"""
password, salt = hashed_password.split(':')
return password == hashlib.sha256(salt.encode() + user_password.encode()).hexdigest()
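# Illustrative round trip (not part of the original file):
#   stored = hash_password("s3cret")   # -> "<sha256 hexdigest>:<salt hex>"
#   check_password(stored, "s3cret")   # -> True
#   check_password(stored, "wrong")    # -> False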
def admin_required(func):
"""Function wrapper to allow only logged in admins to access the page."""
@wraps(func)
def decorated_function(*args, **kwargs):
if not current_user.is_admin():
return redirect(url_for('bad_permissions'))
return func(*args, **kwargs)
return decorated_function
# Load default configuration from local file
with open('config.json') as config:
conf = argparse.Namespace(**json.load(config))
# Argument parser strings
app_description = "annotator Website Application\n\n" \
"All information can be found at https://github.com/seg_annotator.\n" \
"Modify file 'config.json' to edit the application's configuration.\n" \
"There are other command line arguments that can be used:"
help_host = "Hostname of the Flask app. Default: {0}".format(conf.app_host)
help_port = "Port of the Flask app. Default: {0}".format(conf.app_port)
help_debug = "Start Flask app in debug mode. Default: {0}".format(conf.debug)
# Set up the command-line arguments
parser = argparse.ArgumentParser(description=app_description,
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('-H', '--app_host', help=help_host, default=conf.app_host)
parser.add_argument('-P', '--app_port', help=help_port, default=conf.app_port)
parser.add_argument('-D', '--debug', dest='debug', action='store_true', help=help_debug)
parser.set_defaults(debug=conf.debug)
# Update default configs with command line args
args = parser.parse_args()
conf.__dict__.update(args.__dict__)
# Get MongoDB Database Client
client = pymongo.MongoClient()
annotator = client['annotator']
fs = GridFS(annotator)
# Validate MongoDB is started, else exit
try:
client.server_info()
except pymongo.errors.ServerSelectionTimeoutError:
print('MongoDB is not started. Restart it before launching the web app again.')
quit()
# Create Flask Application
app = Flask(__name__)
CORS(app)
app.secret_key = uuid.uuid4().hex # Required to use log in and session manager
login_manager = LoginManager()
login_manager.init_app(app)
# ROS variable
ros_pid = None
socketio = SocketIO(app)
@socketio.on('disconnect')
def disconnect_user():
print('DISCONNECTING USER')
# user_logs = list(annotator.logs.find().skip((annotator.logs).count() - 1))
# user = user_logs[-1]
# annotator.logs.update_one(user, {'$set' : { 'stop_time' : time.time()}})
logout_user()
# session.pop(app.secret_key, None)
# User class
class User(UserMixin):
"""User Class making DB-stored parameters accessible from HTML templates."""
def __init__(self, username):
self.username = username
user = annotator.credentials.find_one({'username': username})
self.admin = user['admin']
self.nb_images = user['nb_images']
def get_id(self):
return self.username
def is_admin(self):
return self.admin
# Login Manager Configuration
@login_manager.user_loader
def load_user(user_id):
return User(user_id)
@login_manager.unauthorized_handler
def unauthorized_callback():
return redirect('/login?next=' + request.path)
# Application routes
@app.route('/')
def go_home():
return redirect(url_for('home'))
@app.route('/login', methods=['GET', 'POST'])
def login():
if request.method == 'POST':
next_page = request.args.get('next')
username = request.form['username']
password = request.form['password']
user = annotator.credentials.find_one({'username': username})
if user and check_password(user['password'], password):
            if user['active']: # Inactive users should not be able to log in
login_user(User(username))
annotator.credentials.update_one(user, {'$set':
{'last_login' : time.time()}})
                # If an admin logs in and there is at least one inactive user, show it
if user['admin'] and annotator.credentials.find_one({'active': False}):
flash('At least one user account has to be activated', 'info')
return redirect(url_for('manage_users'))
annotator.logs.insert_one({'start_time' : time.time(),
'username' : username,
'stop_time' : 0,
'nb_images' : 0})
return redirect(next_page or url_for('home'))
else:
flash('Account not yet activated by an administrator', 'warning')
else:
flash('Invalid credentials', 'danger')
return render_template('login.html')
else:
return render_template('login.html')
@app.route('/logout')
@login_required
def logout():
user_logs = list(annotator.logs.find().skip((annotator.logs).count() - 1))
user = user_logs[-1]
annotator.logs.update_one(user, {'$set' : { 'stop_time' : time.time()}})
logout_user()
return redirect(url_for('home'))
@app.route('/create_account', methods=['GET', 'POST'])
def create_account():
if request.method == 'POST':
next = request.args.get('next')
username = request.form['username'].strip()
password = request.form['password']
password_confirm = request.form['password_confirm']
if not password:
flash('Password cannot be empty', 'danger')
return render_template('create_account.html')
if password != password_confirm:
flash('Both password entries do not match', 'danger')
return render_template('create_account.html')
if not username.replace('_', '').isalnum():
# Only allow letters, numbers and underscore characters in usernames
flash('Invalid username (letters, numbers and underscores only)', 'danger')
return render_template('create_account.html')
user = annotator.credentials.find_one({'username': username})
if user or not username: # Check if username is not empty or already taken
flash('Username not available', 'danger')
return render_template('create_account.html')
active = False
admin = False
# If this is the first user to register, make it active and admin
if not annotator.credentials.find_one():
active = True
admin = True
flash('First account created, activated and is administrator, congratulations!', 'success')
# Create a new user account
annotator.credentials.insert_one({'username': username,
'password': hash_password(password),
'active': active,
'nb_images' : 0,
'admin': admin})
flash('Account created successfully', 'success')
return redirect(url_for('login'))
else:
return render_template('create_account.html')
@app.route('/change_password', methods=['GET', 'POST'])
def change_password():
if request.method == 'POST':
username = request.form['username']
old_password = request.form['old_password']
new_password = request.form['new_password']
user = annotator.credentials.find_one({'username': username})
if user and check_password(user['password'], old_password):
if not new_password:
flash('Password cannot be empty', 'danger')
return render_template('change_password.html')
# Modify password
annotator.credentials.update_one(user, {'$set': {
'password': hash_password(new_password)}})
flash('Password changed successfully', 'success')
return redirect(url_for('login'))
else:
flash('Invalid credentials', 'danger')
return render_template('change_password.html')
else:
return render_template('change_password.html')
@app.route('/home')
def home():
return render_template('index.html')
def sortKeyFunc(s):
t = s.split('/')
k=t[3].split('.')
s=k[0].split('_')
return int(s[2])
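# Illustrative example (not part of the original file), assuming annotation files follow a
# "<prefix>_<tag>_<number>.png" naming scheme under static/data/annotations/:
#   sortKeyFunc("static/data/annotations/img_corrected_42.png")  # -> 42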
@app.route('/load_new_img', methods = ['POST'])
def uploader_new_img():
if request.method == 'POST':
global curr_annotated_img
directory = "static/data/annotations/"
searchlabel = os.path.join(directory, "*.png" )
with open('/home/jonathan/Seg_Annotator/static/data/dataset.json') as f:
data = json.load(f)
print(data)
fileslabel = glob.glob(searchlabel)
fileslabel.sort(key=sortKeyFunc)
i = 0
print("Doin the currently annotated img now")
print(curr_annotated_img)
print(fileslabel[i])
while fileslabel[i] in curr_annotated_img :
i=i+1
print("THIS ONE PASSED")
print(fileslabel[i])
newImgAnnot = fileslabel[i]
t = fileslabel[i].split('/')
#print(t)
newImg=t[0]+"/"+t[1]+"/"+"images"+"/"+t[3]
#print("Sending new img")
#print(newImg)
#print("Sending new img annot")
#print(newImgAnnot)
send = newImg+":"+newImgAnnot
#print(send)
curr_annotated_img.append(newImgAnnot)
return send
@app.route('/uploader', methods = ['POST'])
def uploader_file():
if request.method == 'POST':
pic = request.form['file']
username = request.form['username']
filename = request.form['filename']
#f.save(secure_filename(f.filename))
up = urllib.parse.urlparse(pic)
head, data = up.path.split(',', 1)
bits = head.split(';')
mime_type = bits[0] if bits[0] else 'text/plain'
charset, b64 = 'ASCII', False
for bit in bits:
if bit.startswith('charset='):
charset = bit[8:]
elif bit == 'base64':
b64 = True
binary_data = a2b_base64(data)
directory = "static/data/annotations/"
test = os.listdir( directory )
for item in test:
if item.startswith(filename):
os.remove( os.path.join( directory, item ) )
timestr = time.strftime("%Y%m%d-%H%M%S")
with open("static/data/annotations/" + filename + "_corrected_" + timestr, 'wb') as f:
f.write(binary_data)
user = annotator.credentials.find_one({'username': username})
user_logs = list(annotator.logs.find().skip((annotator.logs).count() - 1))
user_stats = user_logs[-1]
nb_images = user['nb_images']
nb_images = nb_images + 1
nb_images_stats = user_stats['nb_images']
nb_images_stats = nb_images_stats + 1
annotator.logs.update_one(user_stats, {'$set': {'nb_images': nb_images_stats}})
annotator.credentials.update_one(user, {'$set': {'nb_images': nb_images}})
searchlabel = os.path.join(directory, "*.png" )
fileslabel = glob.glob(searchlabel)
fileslabel.sort()
return "Done sending imges"
@app.route('/updater', methods = ['POST'])
def updater_URL():
if request.method == 'POST':
annotURL = request.form["URL"]
directory = "static/data/annotations/"
test = os.listdir(directory)
realURL = "NONE"
for item in test:
if item.startswith(annotURL[25:]):
realURL = item
return "static/data/annotations/" + realURL
@app.route('/annotator')
@login_required
def annotator_edit():
username = current_user.get_id()
return render_template('annotator.html', username=username)
@app.route('/dataset')
@login_required
def dataset():
username = current_user.get_id()
return render_template('dataset.html', username=username)
@app.route('/logs')
@admin_required
def logs():
logs = list(annotator.logs.find())
return render_template('logs.html', logs=logs)
@app.route('/logs/<start_time>')
def log_highlights(start_time):
if not valid_protocol(start_time):
return redirect(url_for('logs'))
# Get database of current protocol
db = client[protocol]
started = db.steps.count()
done = db.steps.count({'end': {'$exists': True}})
info = db.protocol.find_one()
json_protocol = {}
if info:
# Pretty print the raw protocol
json_protocol = json.dumps(info['protocol'], indent=4, sort_keys=True)
return render_template('log_highlights.html', active='Highlights', \
protocol=protocol, json_protocol=json_protocol, \
started=started, done=done, db=db)
@app.route('/logs/delete/<id>')
@login_required
@admin_required
def delete_logs(id):
# Delete all data from current protocol
print('DELETING THE LOG')
test = annotator.logs.find()
print(test)
test_list = list(annotator.logs.find())
print(test_list)
one = annotator.test_list.find({'_id' : id})
print(one)
annotator.logs.remove({})
flash("Entry {0} deleted successfully".format(id), 'info')
return redirect(url_for('logs'))
@app.route('/manage_users')
@login_required
@admin_required
def manage_users():
user_list = list(annotator.credentials.find())
return render_template('manage_users.html', users=user_list)
@app.route('/manage_users/activate/<username>')
@login_required
@admin_required
def activate_user(username):
"""Activate a user account."""
user = annotator.credentials.find_one({'username': username})
if not user['active']:
annotator.credentials.update_one(user, {'$set': {'active': True}})
flash("User {0} activated successfully".format(username), 'success')
else:
flash("User {0} is already active".format(username), 'warning')
return redirect(url_for('manage_users'))
@app.route('/manage_users/demote/<username>')
@login_required
@admin_required
def demote_user(username):
"""Remove admin privileges of another administrator."""
user = annotator.credentials.find_one({'username': username})
if current_user.get_id() == username:
flash('Cannot revert yourself to standard user', 'danger')
elif user:
if user['admin']:
annotator.credentials.update_one(user, {'$set': {'admin': False}})
flash("User {0} reverted to standard user successfully".format(username), 'info')
else:
flash("User {0} is already a standard user".format(username), 'warning')
else:
flash("Cannot revert unknown user {0} to standard user".format(username), 'warning')
return redirect(url_for('manage_users'))
@app.route('/manage_users/promote/<username>')
@login_required
@admin_required
def promote_user(username):
"""Give admin privileges from a normal user."""
user = annotator.credentials.find_one({'username': username})
if user:
if user['admin']:
flash("User {0} is already an administrator".format(username), 'warning')
else:
annotator.credentials.update_one(user, {'$set': {'admin': True}})
flash("User {0} promoted to administrator successfully".format(username), 'info')
else:
flash("Cannot promote unknown user {0} to administrator".format(username), 'warning')
return redirect(url_for('manage_users'))
@app.route('/manage_users/delete/<username>')
@login_required
@admin_required
def delete_user(username):
"""Delete a user account that is not yours."""
user = annotator.credentials.find_one({'username': username})
if current_user.get_id() == username:
flash('Cannot delete yourself', 'danger')
elif user:
annotator.credentials.delete_one(user)
flash("User {0} deleted successfully".format(username), 'info')
else:
flash("Cannot delete unknown user {0}".format(username), 'warning')
return redirect(url_for('manage_users'))
@app.route('/bad_permissions')
def bad_permissions():
"""Function called if a normal user tries to get to an admin reserved page."""
return render_template('bad_permissions.html')
@app.errorhandler(404)
def page_not_found(error):
"""This method handles all unexisting route requests."""
return render_template('404.html'), 404
# Add objects that can be called from the Jinja2 HTML templates
@app.template_filter()
@evalcontextfilter
def nl2br(eval_ctx, value):
"""Converts new lines to paragraph breaks in HTML."""
_paragraph_re = re.compile(r'(?:\r\n|\r|\n){2,}')
result = '\n\n'.join('<p>%s</p>' % p.replace('\n', '<br>\n') \
for p in _paragraph_re.split(escape(value)))
result = result.replace(' ', ' ')
if eval_ctx.autoescape:
result = Markup(result)
return result
def crossdomain(origin=None, methods=None, headers=None, max_age=21600,
attach_to_all=True, automatic_options=True):
"""Decorator function that allows crossdomain requests.
Courtesy of
https://blog.skyred.fi/articles/better-crossdomain-snippet-for-flask.html
"""
if methods is not None:
methods = ', '.join(sorted(x.upper() for x in methods))
    if headers is not None and not isinstance(headers, str):
        headers = ', '.join(x.upper() for x in headers)
    if not isinstance(origin, str):
        origin = ', '.join(origin)
    if isinstance(max_age, datetime.timedelta):
        max_age = max_age.total_seconds()
def get_methods():
""" Determines which methods are allowed
"""
if methods is not None:
return methods
options_resp = current_app.make_default_options_response()
return options_resp.headers['allow']
def decorator(f):
"""The decorator function
"""
def wrapped_function(*args, **kwargs):
"""Caries out the actual cross domain code
"""
if automatic_options and request.method == 'OPTIONS':
resp = current_app.make_default_options_response()
else:
resp = make_response(f(*args, **kwargs))
if not attach_to_all and request.method != 'OPTIONS':
return resp
h = resp.headers
h['Access-Control-Allow-Origin'] = origin
h['Access-Control-Allow-Methods'] = get_methods()
h['Access-Control-Max-Age'] = str(max_age)
h['Access-Control-Allow-Credentials'] = 'true'
h['Access-Control-Allow-Headers'] = \
"Origin, X-Requested-With, Content-Type, Accept, Authorization"
if headers is not None:
h['Access-Control-Allow-Headers'] = headers
return resp
f.provide_automatic_options = False
return update_wrapper(wrapped_function, f)
return decorator
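# Illustrative usage of the crossdomain decorator above (hypothetical route, not part of
# the original application). It is applied below @app.route so the CORS headers are added
# to the wrapped view's responses:
#   @app.route('/api/ping', methods=['GET', 'OPTIONS'])
#   @crossdomain(origin='*')
#   def api_ping():
#       return 'pong'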
def convert_ts(ts):
"""Convert timestamp to human-readable string"""
return datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d_%H:%M:%S')
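# Illustrative example (assumed): convert_ts(1600000000) yields e.g. '2020-09-13_12:26:40';
# the exact string depends on the server's local timezone, since fromtimestamp() uses local time.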
def format_sidebar(name, icon, url):
"""
    Used to generate an HTML line for the sidebar in layout.html.
    - name is the name of the tab
    - icon is the glyphicon name
    - url is the route path used in the link and to mark the active tab
"""
current_url = request.path.split('/')[1]
active = ' class="active"' if url == current_url else ''
html = '<li{0}><a href="/{1}"><i style="float:left; margin-right: 14px;">' \
'<span class="glyphicon glyphicon-{2}"></span></i>{3}' \
'</a></li>'.format(active, url, icon, name)
return Markup(html)
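# Illustrative call with hypothetical values: format_sidebar('Dashboard', 'home', 'dashboard')
# renders roughly
#   <li class="active"><a href="/dashboard"><i style="float:left; margin-right: 14px;">
#   <span class="glyphicon glyphicon-home"></span></i>Dashboard</a></li>
# where class="active" appears only when the first segment of the request path is 'dashboard'.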
# Make some variables and functions available from Jinja2 HTML templates
app.jinja_env.globals.update(conf=conf,
force_type = Markup('onselect="return false" ' \
'onpaste="return false" ' \
'oncopy="return false" ' \
'oncut="return false" ' \
'ondrag="return false" ' \
'ondrop="return false" ' \
'autocomplete=off'),
format_sidebar=format_sidebar,
convert_ts=convert_ts)
# Start the application
if __name__ == '__main__':
#context = SSL.Context(SSL.TLSv1_2_METHOD)
#context.use_privatekey_file('host.key')
#context.use_certificate_file('host.cert')
socketio.run(app, host=conf.app_host, port=int(conf.app_port), ssl_context=('cert.pem', 'key.pem'))
| 34.98722 | 103 | 0.630079 | 3.1875 |
aeeb164f3196df5d2c983cc8756ef9c6cb06c63c | 1,845 | rs | Rust | tarpc/src/util.rs | slickbench/tarpc | 4b7b16345a393af07304a3b184a6660792a2d6ca | [
"MIT"
] | null | null | null | tarpc/src/util.rs | slickbench/tarpc | 4b7b16345a393af07304a3b184a6660792a2d6ca | [
"MIT"
] | null | null | null | tarpc/src/util.rs | slickbench/tarpc | 4b7b16345a393af07304a3b184a6660792a2d6ca | [
"MIT"
] | null | null | null | // Copyright 2018 Google LLC
//
// Use of this source code is governed by an MIT-style
// license that can be found in the LICENSE file or at
// https://opensource.org/licenses/MIT.
use std::{
collections::HashMap,
hash::{BuildHasher, Hash},
time::{Duration, SystemTime},
};
#[cfg(feature = "serde1")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde1")))]
pub mod serde;
/// Extension trait for [SystemTimes](SystemTime) in the future, i.e. deadlines.
pub trait TimeUntil {
/// How much time from now until this time is reached.
fn time_until(&self) -> Duration;
}
impl TimeUntil for SystemTime {
fn time_until(&self) -> Duration {
self.duration_since(SystemTime::now()).unwrap_or_default()
}
}
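// Illustrative usage (not part of the original file): a deadline five seconds away reports
// roughly 5s remaining, while a deadline already in the past saturates to zero because
// duration_since() returns Err and unwrap_or_default() yields Duration::ZERO.
//
// let deadline = SystemTime::now() + Duration::from_secs(5);
// assert!(deadline.time_until() <= Duration::from_secs(5));
// assert_eq!((SystemTime::now() - Duration::from_secs(1)).time_until(), Duration::ZERO);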
/// Collection compaction; configurable `shrink_to_fit`.
pub trait Compact {
/// Compacts space if the ratio of length : capacity is less than `usage_ratio_threshold`.
fn compact(&mut self, usage_ratio_threshold: f64);
}
impl<K, V, H> Compact for HashMap<K, V, H>
where
K: Eq + Hash,
H: BuildHasher,
{
fn compact(&mut self, usage_ratio_threshold: f64) {
let usage_ratio_threshold = usage_ratio_threshold.clamp(f64::MIN_POSITIVE, 1.);
let cap = f64::max(1000., self.len() as f64 / usage_ratio_threshold);
self.shrink_to(cap as usize);
}
}
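// Illustrative reading of compact() above (not part of the original file): a map holding
// 500 entries compacted with a threshold of 0.25 shrinks to at most
// max(1000, 500 / 0.25) = 2000 slots; shrink_to() never grows a map, so this only matters
// when the current capacity exceeds that target.
//
// let mut m: HashMap<u32, u32> = HashMap::with_capacity(10_000);
// m.extend((0..500).map(|i| (i, i)));
// m.compact(0.25); // capacity drops toward roughly 2000 usable slots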
#[test]
fn test_compact() {
let mut map = HashMap::with_capacity(2048);
assert_eq!(map.capacity(), 3584);
// Make usage ratio 25%
for i in 0..896 {
map.insert(format!("k{}", i), "v");
}
map.compact(-1.0);
assert_eq!(map.capacity(), 3584);
map.compact(0.25);
assert_eq!(map.capacity(), 3584);
map.compact(0.50);
assert_eq!(map.capacity(), 1792);
map.compact(1.0);
assert_eq!(map.capacity(), 1792);
map.compact(2.0);
assert_eq!(map.capacity(), 1792);
}
| 25.625 | 94 | 0.64878 | 3.046875 |
282d7a20637ef5424c62014c2108bf41ec668fcc | 2,423 | lua | Lua | InGameAvatarEditor/src/ServerScriptService/AvatarEditorInGameSetup/AvatarEditorInGame/Modules/Packages/UIBlox/App/InputButton/Checkbox.spec.lua | MirayXS/avatar | 7c78513fbe9587915700a0a5fd3c15d5f23596d2 | [
"RSA-MD"
] | 41 | 2021-04-30T18:27:45.000Z | 2022-03-23T21:12:57.000Z | InGameAvatarEditor/src/ServerScriptService/AvatarEditorInGameSetup/AvatarEditorInGame/Modules/Packages/UIBlox/App/InputButton/Checkbox.spec.lua | MirayXS/avatar | 7c78513fbe9587915700a0a5fd3c15d5f23596d2 | [
"RSA-MD"
] | 3 | 2021-08-24T20:07:47.000Z | 2022-02-15T19:40:13.000Z | InGameAvatarEditor/src/ServerScriptService/AvatarEditorInGameSetup/AvatarEditorInGame/Modules/Packages/UIBlox/App/InputButton/Checkbox.spec.lua | MirayXS/avatar | 7c78513fbe9587915700a0a5fd3c15d5f23596d2 | [
"RSA-MD"
] | 25 | 2021-05-02T14:33:04.000Z | 2022-03-17T20:28:07.000Z | return function()
local Packages = script.Parent.Parent.Parent.Parent
local Roact = require(Packages.Roact)
local Checkbox = require(script.Parent.Checkbox)
local mockStyleComponent = require(Packages.UIBlox.Utility.mockStyleComponent)
local Images = require(Packages.UIBlox.App.ImageSet.Images)
describe("lifecycle", function()
it("should mount and unmount without issue", function()
local frame = Instance.new("Frame")
local element = mockStyleComponent({
checkbox = Roact.createElement(Checkbox, {
text = "something",
onActivated = function () end,
size = UDim2.new(1, 0, 1, 0),
layoutOrder = 1,
})
})
local instance = Roact.mount(element, frame, "Checkbox")
Roact.unmount(instance)
end)
it("should have a hollow squircle as its false image", function()
local frame = Instance.new("Frame")
local element = mockStyleComponent({
checkbox = Roact.createElement(Checkbox, {
text = "something",
onActivated = function () end,
size = UDim2.new(1, 0, 1, 0),
layoutOrder = 1,
})
})
local instance = Roact.mount(element, frame, "Checkbox")
local image = frame:FindFirstChildWhichIsA("ImageButton", true)
Roact.update(instance, element)
expect(image.ImageRectOffset).to.equal(Images["squircles/hollow"].ImageRectOffset)
Roact.unmount(instance)
end)
it("should have a filled squircle as its true image", function()
local frame = Instance.new("Frame")
local element = mockStyleComponent({
checkbox = Roact.createElement(Checkbox, {
text = "something",
isSelected = true,
onActivated = function () end,
size = UDim2.new(1, 0, 1, 0),
layoutOrder = 1,
})
})
local instance = Roact.mount(element, frame, "Checkbox")
local image = frame:FindFirstChildWhichIsA("ImageButton", true)
expect(image.ImageRectOffset).to.equal(Images["squircles/fill"].ImageRectOffset)
Roact.unmount(instance)
end)
end)
describe("props", function()
it("should accept and assign a ref", function()
local ref = Roact.createRef()
local element = mockStyleComponent({
checkbox = Roact.createElement(Checkbox, {
text = "something",
onActivated = function () end,
[Roact.Ref] = ref
})
})
local instance = Roact.mount(element)
expect(ref.current).to.be.ok()
expect(ref.current:IsA("Instance")).to.be.ok()
Roact.unmount(instance)
end)
end)
end | 31.064103 | 85 | 0.682625 | 3 |
Subsets and Splits