file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
test_examples_app.py | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# cds-migrator-kit is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Test example app."""
import os
import signal
import subprocess
import time
from os.path import abspath, dirname, join
import pytest
@pytest.yield_fixture
def example_app():
"""Example app fixture."""
current_dir = os.getcwd()
# Go to example directory
project_dir = dirname(dirname(abspath(__file__)))
exampleapp_dir = join(project_dir, 'examples')
os.chdir(exampleapp_dir)
# Setup application
assert subprocess.call('./app-setup.sh', shell=True) == 0
# Start example app
webapp = subprocess.Popen(
'FLASK_APP=app.py flask run --debugger -p 5000',
stdout=subprocess.PIPE, preexec_fn=os.setsid, shell=True)
time.sleep(10)
yield webapp
# Stop server
os.killpg(webapp.pid, signal.SIGTERM)
# Return to the original directory
os.chdir(current_dir)
def test_example_app_role_admin(example_app):
| """Test example app."""
cmd = 'curl http://0.0.0.0:5000/'
output = subprocess.check_output(cmd, shell=True)
assert b'migrator' in output |
|
mod.rs | use std::{
io,
mem::MaybeUninit,
os::unix::io::{AsRawFd, RawFd},
ptr::addr_of,
};
use crate::{check_non_neg1, epoll::tcp::AsyncTcpListener};
mod tcp;
unsafe fn | (fd: RawFd) -> io::Result<()> {
let status = check_non_neg1!(libc::fcntl(fd, libc::F_GETFL));
check_non_neg1!(libc::fcntl(fd, libc::F_SETFL, status | libc::O_NONBLOCK));
Ok(())
}
pub fn example_from_man_page() -> io::Result<()> {
let listener = AsyncTcpListener::bind_any(8888)?;
println!("Created listener {listener:?}");
unsafe {
let listen_sock = listener.as_raw_fd();
let epollfd = check_non_neg1!(libc::epoll_create1(0));
println!("Created epoll instance");
let mut ev = libc::epoll_event {
events: libc::EPOLLIN as _,
u64: listen_sock as _,
};
check_non_neg1!(libc::epoll_ctl(
epollfd,
libc::EPOLL_CTL_ADD,
listen_sock,
&mut ev
));
loop {
let mut events = [libc::epoll_event { events: 0, u64: 0 }; 16];
let nfds = check_non_neg1!(libc::epoll_wait(
epollfd,
events.as_mut_ptr(),
events.len() as _,
-1,
));
for event in &events[0..nfds as _] {
if event.u64 == listen_sock as _ {
// our TCP listener received a new connection
let mut peer_sockaddr = MaybeUninit::uninit();
let mut sockaddr_size = 0;
let conn_sock = check_non_neg1!(libc::accept(
listener.as_raw_fd(),
peer_sockaddr.as_mut_ptr(),
&mut sockaddr_size,
));
make_nonblock(conn_sock)?;
let mut ev = libc::epoll_event {
events: (libc::EPOLLIN | libc::EPOLLET) as _,
u64: conn_sock as _,
};
check_non_neg1!(libc::epoll_ctl(
epollfd,
libc::EPOLL_CTL_ADD,
conn_sock,
&mut ev
));
println!("Received new connection! (fd: {conn_sock})");
} else {
println!(
"something else happened! {}",
addr_of!(event.u64).read_unaligned()
);
}
}
}
}
}
| make_nonblock |
index.ts | import { HttpOptions } from "../lib/index";
export interface HttpAgent {
(request: HttpOptions): Promise<HttpResponse>;
} | } |
export interface HttpResponse {
httpStatus: number;
body: {}; |
jobqueue.go | // Use of gjp source code is governed by a MIT license
// license that can be found in the LICENSE file.
/*
Package gjp stands for Go JobPool, and is willing to be a simple jobpool manager. It maintains
a number of queues determined at the init. No priority whatsoever, just every queues are
processing one job at a time.
*/
package gjp
import (
"container/list"
"fmt"
"time"
"strings"
"errors"
"encoding/json"
)
/*
TYPES
*/
type (
// JobQueue is structure to control jobs queues
JobQueue struct {
Jobs *list.List `json:"jobsWaiting"`//list of waiting jobs
executionChannel chan *Job `json:"-"` //Channel to contain current job to execute in queue
reportChannel chan *Job `json:"-"`//Channel taking job back when its execution has finished
working bool `json:"working"`//Indicate whether or not the queue is working
jobsRemaining int `json:"jobsRemaining"`//Remaining jobs in the queue
totalExecutionTime time.Duration `json:"executionTime"`
}
)
//lock queue while executing
func (jq *JobQueue) lockQueue() {
jq.working = true
}
//unlock queue when jobs are done
func (jq *JobQueue) unlockQueue() {
jq.working = false
}
//Remove job from currentQueue
func (jq *JobQueue) dequeueJob(e *list.Element) {
jq.Jobs.Remove(e)
}
//execute current joblist
func (jq *JobQueue) executeJobQueue() {
defer catchPanic("executeJobQueue")
for jq.jobsRemaining > 0 {
//Always take the first job in queue
j := jq.Jobs.Front().Value.(*Job)
//start job execution
go jq.launchJobExecution()
//put jo in the executionChannel
jq.executionChannel <- j
//Retrieve the job report from the reportChannel
//Waiting until job is finished
jobReport := <-jq.reportChannel
//Checking status on report
switch jobReport.Status {
//Through an error if failed
case failed:
if jobReport.HasJobErrored() {
fmt.Println(jobReport.GetJobError())
} else {
fmt.Println(jobReport.GetJobError())
fmt.Println(jobReport.GetJobName(),
"panicked after an execution of",
jobReport.getExecutionTime())
}
break
case success:
fmt.Println("Job",
jobReport.GetJobName(), | jobReport.getExecutionTime())
break
}
//Since job is done remove it from the jobs queue
jq.dequeueJob(jq.Jobs.Front())
jq.jobsRemaining -= 1
//Go to the next job
}
//unlock queue to allow new jobs to be push to it
jq.unlockQueue()
return
}
//Launch the JobExecution
func (jq *JobQueue) launchJobExecution() {
defer catchPanic("launchJobExecution")
//Retrieve job from execution channel of the queue
j := <-jq.executionChannel
//execute the job synchronously with time starter
j.executeJob(time.Now())
//add this time to the queue execution time
jq.totalExecutionTime += j.getExecutionTime()
//Send job to the report channel
jq.reportChannel <- j
return
}
/*
GETTERS & SETTERS
*/
func (jq *JobQueue) GetJobFromJobId(jobId string) (j *Job, err error) {
for e := jq.Jobs.Front(); e != nil; e = e.Next() {
job := e.Value.(*Job)
if strings.Contains(jobId, job.getJobStringId()) == true {
j = job
return
}
}
err = errors.New("Job not found")
return
}
func (jq *JobQueue) GetJobsWaiting() (jobList string) {
jlArray, err := json.Marshal(jq)
if err != nil {
fmt.Println("Error while processing serialization on jobs waiting :",
err.Error())
}
fmt.Println(jlArray)
jobList = string(jlArray[:])
return
} | "executed in", |
test.ts | /*
* @license Apache-2.0
*
* Copyright (c) 2021 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// tslint:disable: no-construct
// tslint:disable: no-unused-expression
import Number = require( './index' );
// TESTS //
// The function returns a number object...
{
new Number( 10 ); // $ExpectType Number
Number( 15 ); // $ExpectType number
} | // The function does not compile if provided an unsupported number of arguments...
{
Number( 10, 15 ); // $ExpectError
} | |
main.rs | use amethyst::{
assets::{AssetStorage, Loader},
core::{
math::{Point3, Vector2, Vector3},
Named, Parent, Time, Transform, TransformBundle,
},
ecs::{
Component, Entities, Entity, Join, LazyUpdate, NullStorage, Read, ReadExpect, ReadStorage,
System, WriteStorage,
},
input::{is_close_requested, is_key_down, InputBundle, InputHandler, StringBindings},
prelude::*,
renderer::{
camera::{ActiveCamera, Camera, Projection},
debug_drawing::DebugLinesComponent,
formats::texture::ImageFormat,
palette::Srgba,
sprite::{SpriteRender, SpriteSheet, SpriteSheetFormat, SpriteSheetHandle},
transparent::Transparent,
types::DefaultBackend,
RenderDebugLines, RenderFlat2D, RenderToWindow, RenderingBundle, Texture,
},
tiles::{MortonEncoder, RenderTiles2D, Tile, TileMap},
utils::application_root_dir,
window::ScreenDimensions,
winit,
};
#[derive(Default)]
struct Player;
impl Component for Player {
type Storage = NullStorage<Self>;
}
#[derive(Default)]
pub struct DrawSelectionSystem {
start_coordinate: Option<Point3<f32>>,
}
impl<'s> System<'s> for DrawSelectionSystem {
type SystemData = (
Entities<'s>,
Read<'s, ActiveCamera>,
ReadExpect<'s, ScreenDimensions>,
ReadStorage<'s, Camera>,
ReadStorage<'s, Transform>,
WriteStorage<'s, DebugLinesComponent>,
Read<'s, InputHandler<StringBindings>>,
);
fn run(
&mut self,
(entities, active_camera, dimensions, cameras, transforms, mut debug_lines, input): Self::SystemData,
) {
if let Some(lines) = (&mut debug_lines).join().next() {
lines.clear();
if let Some(mouse_position) = input.mouse_position() {
let mut camera_join = (&cameras, &transforms).join();
if let Some((camera, camera_transform)) = active_camera
.entity
.and_then(|a| camera_join.get(a, &entities))
.or_else(|| camera_join.next())
{
let action_down = input
.action_is_down("select")
.expect("selection action missing");
if action_down && self.start_coordinate.is_none() {
// Starting a new selection
self.start_coordinate = Some(Point3::new(
mouse_position.0,
mouse_position.1,
camera_transform.translation().z,
));
} else if action_down && self.start_coordinate.is_some() {
// Active drag
let screen_dimensions =
Vector2::new(dimensions.width(), dimensions.height());
let end_coordinate = Point3::new(
mouse_position.0,
mouse_position.1,
camera_transform.translation().z,
);
let mut start_world = camera.projection().screen_to_world_point(
self.start_coordinate.expect("Wut?"),
screen_dimensions,
camera_transform,
);
let mut end_world = camera.projection().screen_to_world_point(
end_coordinate,
screen_dimensions,
camera_transform,
);
start_world.z = 0.9;
end_world.z = 0.9;
lines.add_box(start_world, end_world, Srgba::new(0.5, 0.05, 0.65, 1.0));
} else if !action_down && self.start_coordinate.is_some() {
// End drag, remove
self.start_coordinate = None;
}
}
}
}
}
}
pub struct CameraSwitchSystem {
pressed: bool,
}
impl Default for CameraSwitchSystem {
fn default() -> Self {
Self { pressed: false }
}
}
impl<'s> System<'s> for CameraSwitchSystem {
type SystemData = (
Entities<'s>,
Read<'s, LazyUpdate>,
Read<'s, ActiveCamera>,
ReadExpect<'s, ScreenDimensions>,
ReadStorage<'s, Camera>,
ReadStorage<'s, Transform>,
ReadStorage<'s, Parent>,
Read<'s, InputHandler<StringBindings>>,
);
fn run(
&mut self,
(entities, lazy, active_camera, dimensions, cameras, transforms, parents, input): Self::SystemData,
) {
if input.action_is_down("camera_switch").unwrap() {
self.pressed = true;
}
if self.pressed && !input.action_is_down("camera_switch").unwrap() {
self.pressed = false;
// Lazily delete the old camera
let mut camera_join = (&entities, &cameras, &transforms, &parents).join();
let (old_camera_entity, old_camera, _, old_parent) = active_camera
.entity
.and_then(|a| camera_join.get(a, &entities))
.or_else(|| camera_join.next())
.unwrap();
let old_camera_entity = old_camera_entity;
let new_parent = old_parent.entity;
let (new_camera, new_position) = match old_camera.projection() {
Projection::Orthographic(_) => (
Camera::standard_3d(dimensions.width(), dimensions.height()),
Vector3::new(0.0, 0.0, 500.1),
),
Projection::Perspective(_) => (
Camera::standard_2d(dimensions.width(), dimensions.height()),
Vector3::new(0.0, 0.0, 1.1),
),
Projection::CustomMatrix(_) => unimplemented!(),
};
lazy.exec_mut(move |w| {
let new_camera =
init_camera(w, new_parent, Transform::from(new_position), new_camera);
w.fetch_mut::<ActiveCamera>().entity = Some(new_camera);
w.delete_entity(old_camera_entity).unwrap();
});
}
}
}
#[derive(Default)]
pub struct CameraMovementSystem;
impl<'s> System<'s> for CameraMovementSystem {
type SystemData = (
Read<'s, ActiveCamera>,
Entities<'s>,
ReadStorage<'s, Camera>,
WriteStorage<'s, Transform>,
Read<'s, InputHandler<StringBindings>>,
);
fn run(&mut self, (active_camera, entities, cameras, mut transforms, input): Self::SystemData) {
let x_move = input.axis_value("camera_x").unwrap();
let y_move = input.axis_value("camera_y").unwrap();
let z_move = input.axis_value("camera_z").unwrap();
let z_move_scale = input.axis_value("camera_scale").unwrap();
if x_move != 0.0 || y_move != 0.0 || z_move != 0.0 || z_move_scale != 0.0 {
let mut camera_join = (&cameras, &mut transforms).join();
if let Some((_, camera_transform)) = active_camera
.entity
.and_then(|a| camera_join.get(a, &entities))
.or_else(|| camera_join.next())
{
camera_transform.prepend_translation_x(x_move * 5.0);
camera_transform.prepend_translation_y(y_move * 5.0);
camera_transform.prepend_translation_z(z_move);
let z_scale = 0.01 * z_move_scale;
let scale = camera_transform.scale();
let scale = Vector3::new(scale.x + z_scale, scale.y + z_scale, scale.z + z_scale);
camera_transform.set_scale(scale);
}
}
}
}
struct MapMovementSystem {
rotate: bool,
translate: bool,
vector: Vector3<f32>,
}
impl Default for MapMovementSystem {
fn default() -> Self {
Self {
rotate: false,
translate: false,
vector: Vector3::new(100.0, 0.0, 0.0),
}
}
}
impl<'s> System<'s> for MapMovementSystem {
type SystemData = (
Read<'s, Time>,
WriteStorage<'s, Transform>,
ReadStorage<'s, TileMap<ExampleTile>>,
Read<'s, InputHandler<StringBindings>>,
);
fn run(&mut self, (time, mut transforms, tilemaps, input): Self::SystemData) {
if input.action_is_down("toggle_rotation").unwrap() {
self.rotate ^= true;
}
if input.action_is_down("toggle_translation").unwrap() {
self.translate ^= true;
}
if self.rotate {
for (_, transform) in (&tilemaps, &mut transforms).join() {
transform.rotate_2d(time.delta_seconds());
}
}
if self.translate {
for (_, transform) in (&tilemaps, &mut transforms).join() {
transform.prepend_translation(self.vector * time.delta_seconds());
if transform.translation().x > 500.0 {
self.vector = Vector3::new(-100.0, 0.0, 0.0);
} else if transform.translation().x < -500.0 { | }
}
fn load_sprite_sheet(world: &mut World, png_path: &str, ron_path: &str) -> SpriteSheetHandle {
let texture_handle = {
let loader = world.read_resource::<Loader>();
let texture_storage = world.read_resource::<AssetStorage<Texture>>();
loader.load(png_path, ImageFormat::default(), (), &texture_storage)
};
let loader = world.read_resource::<Loader>();
let sprite_sheet_store = world.read_resource::<AssetStorage<SpriteSheet>>();
loader.load(
ron_path,
SpriteSheetFormat(texture_handle),
(),
&sprite_sheet_store,
)
}
// Initialize a sprite as a reference point at a fixed location
fn init_reference_sprite(world: &mut World, sprite_sheet: &SpriteSheetHandle) -> Entity {
let mut transform = Transform::default();
transform.set_translation_xyz(0.0, 0.0, 0.1);
let sprite = SpriteRender {
sprite_sheet: sprite_sheet.clone(),
sprite_number: 0,
};
world
.create_entity()
.with(transform)
.with(sprite)
.with(Transparent)
.named("reference")
.build()
}
// Initialize a sprite as a reference point
fn init_screen_reference_sprite(world: &mut World, sprite_sheet: &SpriteSheetHandle) -> Entity {
let mut transform = Transform::default();
transform.set_translation_xyz(-250.0, -245.0, 0.1);
let sprite = SpriteRender {
sprite_sheet: sprite_sheet.clone(),
sprite_number: 0,
};
world
.create_entity()
.with(transform)
.with(sprite)
.with(Transparent)
.named("screen_reference")
.build()
}
fn init_player(world: &mut World, sprite_sheet: &SpriteSheetHandle) -> Entity {
let mut transform = Transform::default();
transform.set_translation_xyz(0.0, 0.0, 0.1);
let sprite = SpriteRender {
sprite_sheet: sprite_sheet.clone(),
sprite_number: 1,
};
world
.create_entity()
.with(transform)
.with(Player)
.with(sprite)
.with(Transparent)
.named("player")
.build()
}
fn init_camera(world: &mut World, parent: Entity, transform: Transform, camera: Camera) -> Entity {
world
.create_entity()
.with(transform)
.with(Parent { entity: parent })
.with(camera)
.named("camera")
.build()
}
#[derive(Default, Clone)]
struct ExampleTile;
impl Tile for ExampleTile {
fn sprite(&self, _: Point3<u32>, _: &World) -> Option<usize> {
Some(1)
}
}
struct Example;
impl SimpleState for Example {
fn on_start(&mut self, data: StateData<'_, GameData<'_, '_>>) {
let world = data.world;
world.register::<Named>();
world.register::<Player>();
let circle_sprite_sheet_handle = load_sprite_sheet(
world,
"texture/Circle_Spritesheet.png",
"texture/Circle_Spritesheet.ron",
);
let map_sprite_sheet_handle =
load_sprite_sheet(world, "texture/cp437_20x20.png", "texture/cp437_20x20.ron");
let (width, height) = {
let dim = world.read_resource::<ScreenDimensions>();
(dim.width(), dim.height())
};
let _reference = init_reference_sprite(world, &circle_sprite_sheet_handle);
let player = init_player(world, &circle_sprite_sheet_handle);
let _camera = init_camera(
world,
player,
Transform::from(Vector3::new(0.0, 0.0, 1.1)),
Camera::standard_2d(width, height),
);
let _reference_screen = init_screen_reference_sprite(world, &circle_sprite_sheet_handle);
// create a test debug lines entity
let _ = world
.create_entity()
.with(DebugLinesComponent::with_capacity(1))
.build();
let map = TileMap::<ExampleTile, MortonEncoder>::new(
Vector3::new(48, 48, 1),
Vector3::new(20, 20, 1),
Some(map_sprite_sheet_handle),
);
let _map_entity = world
.create_entity()
.with(map)
.with(Transform::default())
.build();
}
fn handle_event(
&mut self,
data: StateData<'_, GameData<'_, '_>>,
event: StateEvent,
) -> SimpleTrans {
let StateData { .. } = data;
if let StateEvent::Window(event) = &event {
if is_close_requested(&event) || is_key_down(&event, winit::VirtualKeyCode::Escape) {
Trans::Quit
} else {
Trans::None
}
} else {
Trans::None
}
}
}
fn main() -> amethyst::Result<()> {
amethyst::Logger::from_config(Default::default())
.level_for("amethyst_tiles", log::LevelFilter::Warn)
.start();
let app_root = application_root_dir()?;
let assets_directory = app_root.join("examples/assets");
let display_config_path = app_root.join("examples/tiles/resources/display_config.ron");
let game_data = GameDataBuilder::default()
.with_bundle(TransformBundle::new())?
.with_bundle(
InputBundle::<StringBindings>::new()
.with_bindings_from_file("examples/tiles/resources/input.ron")?,
)?
.with(
MapMovementSystem::default(),
"MapMovementSystem",
&["input_system"],
)
.with(
CameraSwitchSystem::default(),
"camera_switch",
&["input_system"],
)
.with(
CameraMovementSystem::default(),
"movement",
&["camera_switch"],
)
.with(
DrawSelectionSystem::default(),
"DrawSelectionSystem",
&["camera_switch"],
)
.with_bundle(
RenderingBundle::<DefaultBackend>::new()
.with_plugin(
RenderToWindow::from_config_path(display_config_path)?
.with_clear([0.34, 0.36, 0.52, 1.0]),
)
.with_plugin(RenderDebugLines::default())
.with_plugin(RenderFlat2D::default())
.with_plugin(RenderTiles2D::<ExampleTile, MortonEncoder>::default()),
)?;
let mut game = Application::build(assets_directory, Example)?.build(game_data)?;
game.run();
Ok(())
} | self.vector = Vector3::new(100.0, 0.0, 0.0);
}
}
} |
sequence.rs | use crate::{
function::{Either, PyComparisonValue},
types::{richcompare_wrapper, PyComparisonOp, RichCompareFunc},
vm::VirtualMachine,
AsObject, PyObject, PyObjectRef, PyResult,
};
use itertools::Itertools;
use optional::Optioned;
use std::{collections::VecDeque, ops::Range};
pub trait ObjectSequenceOp<'a> {
type Iter: ExactSizeIterator<Item = &'a PyObjectRef>;
fn iter(&'a self) -> Self::Iter;
fn eq(&'a self, vm: &VirtualMachine, other: &'a Self) -> PyResult<bool> {
let lhs = self.iter();
let rhs = other.iter();
if lhs.len() != rhs.len() {
return Ok(false);
}
for (a, b) in lhs.zip_eq(rhs) {
if !vm.identical_or_equal(a, b)? {
return Ok(false);
}
}
Ok(true)
}
fn cmp(&'a self, vm: &VirtualMachine, other: &'a Self, op: PyComparisonOp) -> PyResult<bool> {
let less = match op {
PyComparisonOp::Eq => return self.eq(vm, other),
PyComparisonOp::Ne => return self.eq(vm, other).map(|eq| !eq),
PyComparisonOp::Lt | PyComparisonOp::Le => true,
PyComparisonOp::Gt | PyComparisonOp::Ge => false,
};
let lhs = self.iter();
let rhs = other.iter();
let lhs_len = lhs.len();
let rhs_len = rhs.len();
for (a, b) in lhs.zip(rhs) {
let ret = if less {
vm.bool_seq_lt(a, b)?
} else {
vm.bool_seq_gt(a, b)?
};
if let Some(v) = ret {
return Ok(v);
}
}
Ok(op.eval_ord(lhs_len.cmp(&rhs_len)))
}
}
impl<'a> ObjectSequenceOp<'a> for [PyObjectRef] {
type Iter = core::slice::Iter<'a, PyObjectRef>;
fn iter(&'a self) -> Self::Iter {
self.iter()
}
}
impl<'a> ObjectSequenceOp<'a> for VecDeque<PyObjectRef> {
type Iter = std::collections::vec_deque::Iter<'a, PyObjectRef>;
fn iter(&'a self) -> Self::Iter {
self.iter()
}
}
pub trait MutObjectSequenceOp<'a> {
type Guard;
fn do_get(index: usize, guard: &Self::Guard) -> Option<&PyObjectRef>;
fn do_lock(&'a self) -> Self::Guard;
fn mut_count(&'a self, vm: &VirtualMachine, needle: &PyObject) -> PyResult<usize> {
let mut count = 0;
self._mut_iter_equal_skeleton::<_, false>(vm, needle, 0..isize::MAX as usize, || {
count += 1
})?;
Ok(count)
} | needle: &PyObject,
range: Range<usize>,
) -> PyResult<Optioned<usize>> {
self._mut_iter_equal_skeleton::<_, true>(vm, needle, range, || {})
}
fn mut_index(&'a self, vm: &VirtualMachine, needle: &PyObject) -> PyResult<Optioned<usize>> {
self.mut_index_range(vm, needle, 0..isize::MAX as usize)
}
fn mut_contains(&'a self, vm: &VirtualMachine, needle: &PyObject) -> PyResult<bool> {
self.mut_index(vm, needle).map(|x| x.is_some())
}
fn _mut_iter_equal_skeleton<F, const SHORT: bool>(
&'a self,
vm: &VirtualMachine,
needle: &PyObject,
range: Range<usize>,
mut f: F,
) -> PyResult<Optioned<usize>>
where
F: FnMut(),
{
let needle_cls = needle.class();
let needle_cmp = needle_cls
.mro_find_map(|cls| cls.slots.richcompare.load())
.unwrap();
let mut borrower = None;
let mut i = range.start;
let index = loop {
if i >= range.end {
break Optioned::<usize>::none();
}
let guard = if let Some(x) = borrower.take() {
x
} else {
self.do_lock()
};
let elem = if let Some(x) = Self::do_get(i, &guard) {
x
} else {
break Optioned::<usize>::none();
};
if elem.is(needle) {
f();
if SHORT {
break Optioned::<usize>::some(i);
}
borrower = Some(guard);
} else {
let elem_cls = elem.class();
let reverse_first =
!elem_cls.is(&needle_cls) && elem_cls.fast_issubclass(&needle_cls);
let eq = if reverse_first {
let elem_cmp = elem_cls
.mro_find_map(|cls| cls.slots.richcompare.load())
.unwrap();
drop(elem_cls);
fn cmp(
elem: &PyObject,
needle: &PyObject,
elem_cmp: RichCompareFunc,
needle_cmp: RichCompareFunc,
vm: &VirtualMachine,
) -> PyResult<bool> {
match elem_cmp(elem, needle, PyComparisonOp::Eq, vm)? {
Either::B(PyComparisonValue::Implemented(value)) => Ok(value),
Either::A(obj) if !obj.is(&vm.ctx.not_implemented) => {
obj.try_to_bool(vm)
}
_ => match needle_cmp(needle, elem, PyComparisonOp::Eq, vm)? {
Either::B(PyComparisonValue::Implemented(value)) => Ok(value),
Either::A(obj) if !obj.is(&vm.ctx.not_implemented) => {
obj.try_to_bool(vm)
}
_ => Ok(false),
},
}
}
if elem_cmp as usize == richcompare_wrapper as usize {
let elem = elem.clone();
drop(guard);
cmp(&elem, needle, elem_cmp, needle_cmp, vm)?
} else {
let eq = cmp(elem, needle, elem_cmp, needle_cmp, vm)?;
borrower = Some(guard);
eq
}
} else {
match needle_cmp(needle, elem, PyComparisonOp::Eq, vm)? {
Either::B(PyComparisonValue::Implemented(value)) => {
drop(elem_cls);
borrower = Some(guard);
value
}
Either::A(obj) if !obj.is(&vm.ctx.not_implemented) => {
drop(elem_cls);
borrower = Some(guard);
obj.try_to_bool(vm)?
}
_ => {
let elem_cmp = elem_cls
.mro_find_map(|cls| cls.slots.richcompare.load())
.unwrap();
drop(elem_cls);
fn cmp(
elem: &PyObject,
needle: &PyObject,
elem_cmp: RichCompareFunc,
vm: &VirtualMachine,
) -> PyResult<bool> {
match elem_cmp(elem, needle, PyComparisonOp::Eq, vm)? {
Either::B(PyComparisonValue::Implemented(value)) => Ok(value),
Either::A(obj) if !obj.is(&vm.ctx.not_implemented) => {
obj.try_to_bool(vm)
}
_ => Ok(false),
}
}
if elem_cmp as usize == richcompare_wrapper as usize {
let elem = elem.clone();
drop(guard);
cmp(&elem, needle, elem_cmp, vm)?
} else {
let eq = cmp(elem, needle, elem_cmp, vm)?;
borrower = Some(guard);
eq
}
}
}
};
if eq {
f();
if SHORT {
break Optioned::<usize>::some(i);
}
}
}
i += 1;
};
Ok(index)
}
}
pub trait SequenceOp<T: Clone>
where
Self: AsRef<[T]>,
{
fn mul(&self, vm: &VirtualMachine, n: isize) -> PyResult<Vec<T>> {
let n = vm.check_repeat_or_overflow_error(self.as_ref().len(), n)?;
let mut v = Vec::with_capacity(n * self.as_ref().len());
for _ in 0..n {
v.extend_from_slice(self.as_ref());
}
Ok(v)
}
}
impl<T: Clone> SequenceOp<T> for [T] {}
pub trait SequenceMutOp<T: Clone>
where
Self: AsRef<[T]>,
{
fn as_vec_mut(&mut self) -> &mut Vec<T>;
fn imul(&mut self, vm: &VirtualMachine, n: isize) -> PyResult<()> {
let n = vm.check_repeat_or_overflow_error(self.as_ref().len(), n)?;
if n == 0 {
self.as_vec_mut().clear();
} else if n != 1 {
let mut sample = self.as_vec_mut().clone();
if n != 2 {
self.as_vec_mut().reserve(sample.len() * (n - 1));
for _ in 0..n - 2 {
self.as_vec_mut().extend_from_slice(&sample);
}
}
self.as_vec_mut().append(&mut sample);
}
Ok(())
}
}
impl<T: Clone> SequenceMutOp<T> for Vec<T> {
fn as_vec_mut(&mut self) -> &mut Vec<T> {
self
}
} |
fn mut_index_range(
&'a self,
vm: &VirtualMachine, |
jobs.py | """
Copyright (c) 2011, 2012, Regents of the University of California
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
OF THE POSSIBILITY OF SUCH DAMAGE.
"""
"""
@author Tyler Hoyt <[email protected]>
"""
import time
from twisted.internet import task, reactor, defer
from smap import util
class SmapJob:
def __init__(self, job):
self.name = job['Name'] if 'Name' in job else None
self.after = job['After'] if 'After' in job else None
self.start_time = job['StartTime'] if 'StartTime' in job else None
self.actions = job['Actions']
self.uuid = None
class SmapJobsManager:
def __init__(self, path, inst):
self.jobs = []
self._job_ids = {}
self.inst = inst
self.actuators = [path]
def add_job(self, job):
j = SmapJob(job)
if 'StartTime' in job:
start = job['StartTime'] / 1000.
wait = start - util.now()
else:
wait = 0
assert wait >= 0
actions = j.actions
if j.after:
previous_job = util.find(lambda x: x.name == j.after, self.jobs)
if previous_job is None:
raise util.SmapException("No job named %s") % j.after
else:
j.d_outer = previous_job.d_outer
j.job_id = previous_job.job_id
j.uuid = job['uuid']
self._job_ids[j.uuid] = j.job_id
else: # assign it its own deferred
j.d_outer = defer.Deferred()
# closure that will carry out all of the job's actions
def act(_):
for action in actions:
path = action['Path']
state = action['State']
actuator = self.inst.get_timeseries(path)
print('Setting', path, 'to', state)
actuator.impl.set_state(None, state)
# queue the callback
j.d_outer.addCallback(act)
print('Added callback to', j.d_outer)
if not j.after:
# job_id will let you cancel it
j.job_id = reactor.callLater(wait, j.d_outer.callback, None)
self._job_ids[job['uuid']] = j.job_id
self.jobs.append(j)
return j.d_outer
def cancel_job(self, uuids):
uuids = set(uuids)
for uuid in uuids:
print('Cancelling job', uuid) | call_id = self._job_ids[uuid]
call_id.cancel()
except Exception:
pass | try: |
Tasks.py | from bilibili import bilibili
import datetime
import time
import asyncio
import traceback
import os
import configloader
import utils
from printer import Printer
class Tasks:
def __init__(self):
fileDir = os.path.dirname(os.path.realpath('__file__'))
file_user = fileDir + "/conf/user.conf"
self.dic_user = configloader.load_user(file_user)
# 获取每日包裹奖励
async def Daily_bag(self):
response = await bilibili().get_dailybag()
json_response = await response.json()
for i in range(0, len(json_response['data']['bag_list'])):
Printer().printer(f"获得-{json_response['data']['bag_list'][i]['bag_name']}-成功", "Info", "green")
def CurrentTime(self):
currenttime = str(int(time.mktime(datetime.datetime.now().timetuple())))
return currenttime
# 签到功能
async def DoSign(self):
response = await bilibili().get_dosign()
temp = await response.json(content_type=None)
Printer().printer(f"签到状态:{temp['msg']}", "Info", "green")
# 领取每日任务奖励
async def Daily_Task(self):
response2 = await bilibili().get_dailytask()
json_response2 = await response2.json()
Printer().printer(f"双端观看直播:{json_response2['msg']}", "Info", "green")
# 应援团签到
async def link_sign(self):
response = await bilibili().get_grouplist()
json_response = await response.json(content_type=None)
check = len(json_response['data']['list'])
group_id_list = []
owner_uid_list = []
for i in range(0, check):
group_id = json_response['data']['list'][i]['group_id']
owner_uid = json_response['data']['list'][i]['owner_uid']
group_id_list.append(group_id)
owner_uid_list.append(owner_uid)
for (i1, i2) in zip(group_id_list, owner_uid_list):
response = await bilibili().assign_group(i1, i2)
json_response = await response.json(content_type=None)
if json_response['code'] == 0:
if (json_response['data']['status']) == 1:
Printer().printer(f"应援团{i1}已应援过", "Info", "green")
if (json_response['data']['status']) == 0:
Printer().printer(f"应援团{i1}应援成功,获得{json_response['data']['add_num']}点亲密度", "Info", "green")
| nd_gift(self):
if self.dic_user['gift']['on/off'] == '1':
argvs, x = await utils.fetch_bag_list(printer=False)
for i in range(0, len(argvs)):
giftID = argvs[i][0]
giftNum = argvs[i][1]
bagID = argvs[i][2]
roomID = self.dic_user['gift']['send_to_room']
await utils.send_gift_web(roomID, giftID, giftNum, bagID)
if not argvs:
Printer().printer(f"没有将要过期的礼物~", "Info", "green")
async def auto_send_gift(self):
if self.dic_user['auto-gift']['on/off'] == "1":
a = await utils.fetch_medal(printer=False)
res = await bilibili().gift_list()
json_res = await res.json()
temp_dic = {}
for j in range(0, len(json_res['data'])):
price = json_res['data'][j]['price']
id = json_res['data'][j]['id']
temp_dic[id] = price
x, temp = await utils.fetch_bag_list(printer=False)
roomid = a[0]
today_feed = a[1]
day_limit = a[2]
left_num = int(day_limit) - int(today_feed)
calculate = 0
for i in range(0, len(temp)):
gift_id = int(temp[i][0])
gift_num = int(temp[i][1])
bag_id = int(temp[i][2])
expire = int(temp[i][3])
if (gift_id != 4 and gift_id != 3 and gift_id != 9 and gift_id != 10) and expire != 0:
if (gift_num * (temp_dic[gift_id] / 100) < left_num):
calculate = calculate + temp_dic[gift_id] / 100 * gift_num
tmp2 = temp_dic[gift_id] / 100 * gift_num
await utils.send_gift_web(roomid, gift_id, gift_num, bag_id)
left_num = left_num - tmp2
elif left_num - temp_dic[gift_id] / 100 >= 0:
tmp = (left_num) / (temp_dic[gift_id] / 100)
tmp1 = (temp_dic[gift_id] / 100) * int(tmp)
calculate = calculate + tmp1
await utils.send_gift_web(roomid, gift_id, tmp, bag_id)
left_num = left_num - tmp1
Printer().printer(f"自动送礼共送出亲密度为{int(calculate)}的礼物", "Info", "green")
async def doublegain_coin2silver(self):
if self.dic_user['doublegain_coin2silver']['on/off'] == "1":
response0 = await bilibili().request_doublegain_coin2silver()
json_response0 = await response0.json()
response1 = await bilibili().request_doublegain_coin2silver()
json_response1 = await response1.json()
print(json_response0['msg'], json_response1['msg'])
async def sliver2coin(self):
if self.dic_user['coin']['on/off'] == '1':
response1 = await bilibili().silver2coin_app()
json_response1 = await response1.json()
Printer().printer(f"银瓜子兑换硬币状态:{json_response1['msg']}", "Info", "green")
async def run(self):
while 1:
try:
Printer().printer(f"开始执行每日任务", "Info", "green")
await self.DoSign()
await self.Daily_bag()
await self.Daily_Task()
await self.link_sign()
await self.send_gift()
await self.sliver2coin()
await self.doublegain_coin2silver()
await self.auto_send_gift()
await utils.reconnect()
await asyncio.sleep(21600)
except:
await asyncio.sleep(10)
Printer().printer(traceback.format_exc(), "Error", "red")
| else:
Printer().printer(f"应援团{i1}应援失败,{json_response}", "Error", "red")
async def se |
certificate_signing_request_response.py | # coding: utf-8
"""
FlashArray REST API
|
OpenAPI spec version: 2.5
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_5 import models
class CertificateSigningRequestResponse(object):
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'items': 'list[CertificateSigningRequest]'
}
attribute_map = {
'items': 'items'
}
required_args = {
}
def __init__(
self,
items=None, # type: List[models.CertificateSigningRequest]
):
"""
Keyword args:
items (list[CertificateSigningRequest])
"""
if items is not None:
self.items = items
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `CertificateSigningRequestResponse`".format(key))
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
raise AttributeError
else:
return value
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(CertificateSigningRequestResponse, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, CertificateSigningRequestResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other | No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) |
18.b36ce7b70fb8a89bf6f2.js | (window["webpackJsonp"] = window["webpackJsonp"] || []).push([[18],{
/***/ "./node_modules/babel-loader/lib/index.js?!./node_modules/vuetify-loader/lib/loader.js?!./node_modules/vue-loader/lib/index.js?!./resources/js/Pages/Profile.vue?vue&type=script&lang=js&":
/*!********************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib??ref--4-0!./node_modules/vuetify-loader/lib/loader.js??ref--11-0!./node_modules/vue-loader/lib??vue-loader-options!./resources/js/Pages/Profile.vue?vue&type=script&lang=js& ***!
\********************************************************************************************************************************************************************************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _components_globals_Navbar__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../components/globals/Navbar */ "./resources/js/components/globals/Navbar.vue");
/* harmony import */ var _components_globals_AppFooter__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../components/globals/AppFooter */ "./resources/js/components/globals/AppFooter.vue");
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
/* harmony default export */ __webpack_exports__["default"] = ({
data: function data() {
return {
activatedTab: null,
item: 0,
items: [{
text: "Profile",
icon: "mdi-account-outline",
path: "details"
}, {
text: "Mes Formations",
icon: "mdi-format-list-bulleted-square",
path: "cours"
}, {
text: "Mes Certaficats",
icon: "mdi-certificate-outline",
path: "history"
}, {
text: "Historique d'achat",
icon: "mdi-history",
path: "history"
}, {
text: "Mes Réalisations",
icon: "mdi-check-circle",
path: "history"
}, {
text: "Paramètres du Compte",
icon: "mdi-cog-outline",
path: "parametres"
}]
};
},
components: {
Navbar: _components_globals_Navbar__WEBPACK_IMPORTED_MODULE_0__["default"], | AppFooter: _components_globals_AppFooter__WEBPACK_IMPORTED_MODULE_1__["default"]
},
methods: {
EditUser: function EditUser() {
return this.$inertia.post(route("user.edit", this.form));
}
}
});
/***/ }),
/***/ "./node_modules/vue-loader/lib/loaders/templateLoader.js?!./node_modules/vuetify-loader/lib/loader.js?!./node_modules/vue-loader/lib/index.js?!./resources/js/Pages/Profile.vue?vue&type=template&id=1bdc34e0&":
/*!************************************************************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/vuetify-loader/lib/loader.js??ref--11-0!./node_modules/vue-loader/lib??vue-loader-options!./resources/js/Pages/Profile.vue?vue&type=template&id=1bdc34e0& ***!
\************************************************************************************************************************************************************************************************************************************************************/
/*! exports provided: render, staticRenderFns */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "render", function() { return render; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "staticRenderFns", function() { return staticRenderFns; });
var render = function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("v-app", [
_c(
"div",
{ staticClass: "profile" },
[
_c("Navbar"),
_vm._v(" "),
_c(
"section",
{ staticClass: "py-7" },
[
_c(
"v-container",
[
_c(
"v-row",
[
_c(
"v-col",
{ attrs: { cols: "12", md: "3" } },
[
_c(
"v-list",
[
_c(
"v-list-item",
[
_c(
"v-list-item-avatar",
{ attrs: { size: "100" } },
[
_c("v-img", {
attrs: {
src:
"require(`\\upload\\$page.auth.user.image')`)"
}
})
],
1
)
],
1
),
_vm._v(" "),
_c(
"v-list-item",
[
_c(
"v-list-item-content",
[
_c(
"v-list-item-title",
{ staticClass: "title" },
[
_vm._v(
_vm._s(_vm.$page.auth.user.nom) +
" " +
_vm._s(_vm.$page.auth.user.prenom) +
" "
)
]
),
_vm._v(" "),
_c("v-list-item-subtitle", [
_vm._v(_vm._s(_vm.$page.auth.user.image))
])
],
1
)
],
1
)
],
1
),
_vm._v(" "),
_c("v-divider"),
_vm._v(" "),
_c(
"v-list",
{ attrs: { nav: "", dense: "" } },
[
_c(
"v-list-item-group",
{
attrs: { color: "primary" },
model: {
value: _vm.item,
callback: function($$v) {
_vm.item = $$v
},
expression: "item"
}
},
_vm._l(_vm.items, function(item, i) {
return _c(
"v-list-item",
{
key: i,
attrs: {
router: "",
to: "" + item.path,
exact: ""
}
},
[
_c(
"v-list-item-icon",
[
_c("v-icon", {
domProps: {
textContent: _vm._s(item.icon)
}
})
],
1
),
_vm._v(" "),
_c(
"v-list-item-content",
[
_c("v-list-item-title", {
domProps: {
textContent: _vm._s(item.text)
}
})
],
1
)
],
1
)
}),
1
)
],
1
)
],
1
),
_vm._v(" "),
_c(
"v-col",
{ attrs: { cols: "12", md: "9" } },
[_c("router-view")],
1
)
],
1
)
],
1
)
],
1
),
_vm._v(" "),
_c("AppFooter")
],
1
)
])
}
var staticRenderFns = []
render._withStripped = true
/***/ }),
/***/ "./resources/js/Pages/Profile.vue":
/*!****************************************!*\
!*** ./resources/js/Pages/Profile.vue ***!
\****************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _Profile_vue_vue_type_template_id_1bdc34e0___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./Profile.vue?vue&type=template&id=1bdc34e0& */ "./resources/js/Pages/Profile.vue?vue&type=template&id=1bdc34e0&");
/* harmony import */ var _Profile_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./Profile.vue?vue&type=script&lang=js& */ "./resources/js/Pages/Profile.vue?vue&type=script&lang=js&");
/* empty/unused harmony star reexport *//* harmony import */ var _node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../../../node_modules/vue-loader/lib/runtime/componentNormalizer.js */ "./node_modules/vue-loader/lib/runtime/componentNormalizer.js");
/* harmony import */ var _node_modules_vuetify_loader_lib_runtime_installComponents_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../../../node_modules/vuetify-loader/lib/runtime/installComponents.js */ "./node_modules/vuetify-loader/lib/runtime/installComponents.js");
/* harmony import */ var _node_modules_vuetify_loader_lib_runtime_installComponents_js__WEBPACK_IMPORTED_MODULE_3___default = /*#__PURE__*/__webpack_require__.n(_node_modules_vuetify_loader_lib_runtime_installComponents_js__WEBPACK_IMPORTED_MODULE_3__);
/* harmony import */ var vuetify_lib_components_VApp__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! vuetify/lib/components/VApp */ "./node_modules/vuetify/lib/components/VApp/index.js");
/* harmony import */ var vuetify_lib_components_VGrid__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! vuetify/lib/components/VGrid */ "./node_modules/vuetify/lib/components/VGrid/index.js");
/* harmony import */ var vuetify_lib_components_VDivider__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! vuetify/lib/components/VDivider */ "./node_modules/vuetify/lib/components/VDivider/index.js");
/* harmony import */ var vuetify_lib_components_VIcon__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! vuetify/lib/components/VIcon */ "./node_modules/vuetify/lib/components/VIcon/index.js");
/* harmony import */ var vuetify_lib_components_VImg__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! vuetify/lib/components/VImg */ "./node_modules/vuetify/lib/components/VImg/index.js");
/* harmony import */ var vuetify_lib_components_VList__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! vuetify/lib/components/VList */ "./node_modules/vuetify/lib/components/VList/index.js");
/* normalize component */
var component = Object(_node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__["default"])(
_Profile_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__["default"],
_Profile_vue_vue_type_template_id_1bdc34e0___WEBPACK_IMPORTED_MODULE_0__["render"],
_Profile_vue_vue_type_template_id_1bdc34e0___WEBPACK_IMPORTED_MODULE_0__["staticRenderFns"],
false,
null,
null,
null
)
/* vuetify-loader */
_node_modules_vuetify_loader_lib_runtime_installComponents_js__WEBPACK_IMPORTED_MODULE_3___default()(component, {VApp: vuetify_lib_components_VApp__WEBPACK_IMPORTED_MODULE_4__["VApp"],VCol: vuetify_lib_components_VGrid__WEBPACK_IMPORTED_MODULE_5__["VCol"],VContainer: vuetify_lib_components_VGrid__WEBPACK_IMPORTED_MODULE_5__["VContainer"],VDivider: vuetify_lib_components_VDivider__WEBPACK_IMPORTED_MODULE_6__["VDivider"],VIcon: vuetify_lib_components_VIcon__WEBPACK_IMPORTED_MODULE_7__["VIcon"],VImg: vuetify_lib_components_VImg__WEBPACK_IMPORTED_MODULE_8__["VImg"],VList: vuetify_lib_components_VList__WEBPACK_IMPORTED_MODULE_9__["VList"],VListItem: vuetify_lib_components_VList__WEBPACK_IMPORTED_MODULE_9__["VListItem"],VListItemAvatar: vuetify_lib_components_VList__WEBPACK_IMPORTED_MODULE_9__["VListItemAvatar"],VListItemContent: vuetify_lib_components_VList__WEBPACK_IMPORTED_MODULE_9__["VListItemContent"],VListItemGroup: vuetify_lib_components_VList__WEBPACK_IMPORTED_MODULE_9__["VListItemGroup"],VListItemIcon: vuetify_lib_components_VList__WEBPACK_IMPORTED_MODULE_9__["VListItemIcon"],VListItemSubtitle: vuetify_lib_components_VList__WEBPACK_IMPORTED_MODULE_9__["VListItemSubtitle"],VListItemTitle: vuetify_lib_components_VList__WEBPACK_IMPORTED_MODULE_9__["VListItemTitle"],VRow: vuetify_lib_components_VGrid__WEBPACK_IMPORTED_MODULE_5__["VRow"]})
/* hot reload */
if (false) { var api; }
component.options.__file = "resources/js/Pages/Profile.vue"
/* harmony default export */ __webpack_exports__["default"] = (component.exports);
/***/ }),
/***/ "./resources/js/Pages/Profile.vue?vue&type=script&lang=js&":
/*!*****************************************************************!*\
!*** ./resources/js/Pages/Profile.vue?vue&type=script&lang=js& ***!
\*****************************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _node_modules_babel_loader_lib_index_js_ref_4_0_node_modules_vuetify_loader_lib_loader_js_ref_11_0_node_modules_vue_loader_lib_index_js_vue_loader_options_Profile_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../node_modules/babel-loader/lib??ref--4-0!../../../node_modules/vuetify-loader/lib/loader.js??ref--11-0!../../../node_modules/vue-loader/lib??vue-loader-options!./Profile.vue?vue&type=script&lang=js& */ "./node_modules/babel-loader/lib/index.js?!./node_modules/vuetify-loader/lib/loader.js?!./node_modules/vue-loader/lib/index.js?!./resources/js/Pages/Profile.vue?vue&type=script&lang=js&");
/* empty/unused harmony star reexport */ /* harmony default export */ __webpack_exports__["default"] = (_node_modules_babel_loader_lib_index_js_ref_4_0_node_modules_vuetify_loader_lib_loader_js_ref_11_0_node_modules_vue_loader_lib_index_js_vue_loader_options_Profile_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__["default"]);
/***/ }),
/***/ "./resources/js/Pages/Profile.vue?vue&type=template&id=1bdc34e0&":
/*!***********************************************************************!*\
!*** ./resources/js/Pages/Profile.vue?vue&type=template&id=1bdc34e0& ***!
\***********************************************************************/
/*! exports provided: render, staticRenderFns */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vuetify_loader_lib_loader_js_ref_11_0_node_modules_vue_loader_lib_index_js_vue_loader_options_Profile_vue_vue_type_template_id_1bdc34e0___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!../../../node_modules/vuetify-loader/lib/loader.js??ref--11-0!../../../node_modules/vue-loader/lib??vue-loader-options!./Profile.vue?vue&type=template&id=1bdc34e0& */ "./node_modules/vue-loader/lib/loaders/templateLoader.js?!./node_modules/vuetify-loader/lib/loader.js?!./node_modules/vue-loader/lib/index.js?!./resources/js/Pages/Profile.vue?vue&type=template&id=1bdc34e0&");
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "render", function() { return _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vuetify_loader_lib_loader_js_ref_11_0_node_modules_vue_loader_lib_index_js_vue_loader_options_Profile_vue_vue_type_template_id_1bdc34e0___WEBPACK_IMPORTED_MODULE_0__["render"]; });
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "staticRenderFns", function() { return _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vuetify_loader_lib_loader_js_ref_11_0_node_modules_vue_loader_lib_index_js_vue_loader_options_Profile_vue_vue_type_template_id_1bdc34e0___WEBPACK_IMPORTED_MODULE_0__["staticRenderFns"]; });
/***/ })
}]); | |
client.go | package client
import (
"context"
"github.com/mongodb/mongodb-kubernetes-operator/pkg/kube/pod"
"github.com/mongodb/mongodb-kubernetes-operator/pkg/kube/configmap"
"github.com/mongodb/mongodb-kubernetes-operator/pkg/kube/secret"
"github.com/mongodb/mongodb-kubernetes-operator/pkg/kube/service"
"github.com/mongodb/mongodb-kubernetes-operator/pkg/kube/statefulset"
appsv1 "k8s.io/api/apps/v1"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/types"
k8sClient "sigs.k8s.io/controller-runtime/pkg/client"
)
func NewClient(c k8sClient.Client) Client {
return client{
Client: c,
}
}
type Client interface {
k8sClient.Client
KubernetesSecretClient
// TODO: remove this function, add mongodb package which has GetAndUpdate function
GetAndUpdate(nsName types.NamespacedName, obj k8sClient.Object, updateFunc func()) error
configmap.GetUpdateCreateDeleter
service.GetUpdateCreateDeleter
statefulset.GetUpdateCreateDeleter
pod.Getter
}
type KubernetesSecretClient interface {
secret.GetUpdateCreateDeleter
}
type client struct {
k8sClient.Client
}
// GetAndUpdate fetches the most recent version of the runtime.Object with the provided
// nsName and applies the update function. The update function should update "obj" from
// an outer scope
func (c client) GetAndUpdate(nsName types.NamespacedName, obj k8sClient.Object, updateFunc func()) error {
err := c.Get(context.TODO(), nsName, obj)
if err != nil {
return err
}
// apply the function on the most recent version of the resource
updateFunc()
return c.Update(context.TODO(), obj)
}
// GetConfigMap provides a thin wrapper and client.client to access corev1.ConfigMap types
func (c client) GetConfigMap(objectKey k8sClient.ObjectKey) (corev1.ConfigMap, error) {
cm := corev1.ConfigMap{}
if err := c.Get(context.TODO(), objectKey, &cm); err != nil {
return corev1.ConfigMap{}, err
}
return cm, nil
}
// UpdateConfigMap provides a thin wrapper and client.Client to update corev1.ConfigMap types
func (c client) UpdateConfigMap(cm corev1.ConfigMap) error {
return c.Update(context.TODO(), &cm)
}
// CreateConfigMap provides a thin wrapper and client.Client to create corev1.ConfigMap types
func (c client) CreateConfigMap(cm corev1.ConfigMap) error {
return c.Create(context.TODO(), &cm)
}
// DeleteConfigMap deletes the configmap of the given object key
func (c client) DeleteConfigMap(key k8sClient.ObjectKey) error {
cm := corev1.ConfigMap{
ObjectMeta: metav1.ObjectMeta{
Name: key.Name,
Namespace: key.Namespace,
},
}
return c.Delete(context.TODO(), &cm)
}
// GetPod provides a thin wrapper and client.client to access corev1.Pod types.
func (c client) GetPod(objectKey k8sClient.ObjectKey) (corev1.Pod, error) {
p := corev1.Pod{}
if err := c.Get(context.TODO(), objectKey, &p); err != nil {
return corev1.Pod{}, err
}
return p, nil
}
// GetSecret provides a thin wrapper and client.Client to access corev1.Secret types
func (c client) GetSecret(objectKey k8sClient.ObjectKey) (corev1.Secret, error) {
s := corev1.Secret{}
if err := c.Get(context.TODO(), objectKey, &s); err != nil {
return corev1.Secret{}, err
}
return s, nil
}
// UpdateSecret provides a thin wrapper and client.Client to update corev1.Secret types
func (c client) UpdateSecret(secret corev1.Secret) error {
return c.Update(context.TODO(), &secret)
}
// CreateSecret provides a thin wrapper and client.Client to create corev1.Secret types
func (c client) CreateSecret(secret corev1.Secret) error {
return c.Create(context.TODO(), &secret)
}
// DeleteSecret provides a thin wrapper and client.Client to delete corev1.Secret types
func (c client) DeleteSecret(key k8sClient.ObjectKey) error {
s := corev1.Secret{
ObjectMeta: metav1.ObjectMeta{
Name: key.Name,
Namespace: key.Namespace,
},
}
return c.Delete(context.TODO(), &s)
}
// GetService provides a thin wrapper and client.Client to access corev1.Service types
func (c client) GetService(objectKey k8sClient.ObjectKey) (corev1.Service, error) {
s := corev1.Service{}
if err := c.Get(context.TODO(), objectKey, &s); err != nil |
return s, nil
}
// UpdateService provides a thin wrapper and client.Client to update corev1.Service types
func (c client) UpdateService(service corev1.Service) error {
return c.Update(context.TODO(), &service)
}
// CreateService provides a thin wrapper and client.Client to create corev1.Service types
func (c client) CreateService(service corev1.Service) error {
return c.Create(context.TODO(), &service)
}
// DeleteService provides a thin wrapper around client.Client to delete corev1.Service types
func (c client) DeleteService(objectKey k8sClient.ObjectKey) error {
svc := corev1.Service{
ObjectMeta: metav1.ObjectMeta{
Name: objectKey.Name,
Namespace: objectKey.Namespace,
},
}
return c.Delete(context.TODO(), &svc)
}
// GetStatefulSet provides a thin wrapper and client.Client to access appsv1.StatefulSet types
func (c client) GetStatefulSet(objectKey k8sClient.ObjectKey) (appsv1.StatefulSet, error) {
sts := appsv1.StatefulSet{}
if err := c.Get(context.TODO(), objectKey, &sts); err != nil {
return appsv1.StatefulSet{}, err
}
return sts, nil
}
// UpdateStatefulSet provides a thin wrapper and client.Client to update appsv1.StatefulSet types
// the updated StatefulSet is returned
func (c client) UpdateStatefulSet(sts appsv1.StatefulSet) (appsv1.StatefulSet, error) {
stsToUpdate := &sts
err := c.Update(context.TODO(), stsToUpdate)
return *stsToUpdate, err
}
// CreateStatefulSet provides a thin wrapper and client.Client to create appsv1.StatefulSet types
func (c client) CreateStatefulSet(sts appsv1.StatefulSet) error {
return c.Create(context.TODO(), &sts)
}
// DeleteStatefulSet provides a thin wrapper and client.Client to delete appsv1.StatefulSet types
func (c client) DeleteStatefulSet(objectKey k8sClient.ObjectKey) error {
sts := appsv1.StatefulSet{
ObjectMeta: metav1.ObjectMeta{
Name: objectKey.Name,
Namespace: objectKey.Namespace,
},
}
return c.Delete(context.TODO(), &sts)
}
| {
return corev1.Service{}, err
} |
test_photometry.py | import math
import numpy as np
import sncosmo
from baselayer.app.env import load_env
from skyportal.models import DBSession, Token
from skyportal.tests import api
_, cfg = load_env()
PHOT_DETECTION_THRESHOLD = cfg["misc.photometry_detection_threshold_nsigma"]
def test_token_user_post_get_photometry_data(
upload_data_token, public_source, public_group, ztf_camera
):
status, data = api(
'POST',
'photometry',
data={
'obj_id': str(public_source.id),
'mjd': 58000.0,
'instrument_id': ztf_camera.id,
'flux': 12.24,
'fluxerr': 0.031,
'zp': 25.0,
'magsys': 'ab',
'filter': 'ztfg',
'group_ids': [public_group.id],
},
token=upload_data_token,
)
assert status == 200
assert data['status'] == 'success'
photometry_id = data['data']['ids'][0]
status, data = api(
'GET', f'photometry/{photometry_id}?format=flux', token=upload_data_token
)
assert status == 200
assert data['status'] == 'success'
assert data['data']['ra'] is None
assert data['data']['dec'] is None
assert data['data']['ra_unc'] is None
assert data['data']['dec_unc'] is None
np.testing.assert_allclose(
data['data']['flux'], 12.24 * 10 ** (-0.4 * (25.0 - 23.9))
)
def test_token_user_post_put_photometry_data(
upload_data_token, public_source, public_group, ztf_camera
):
status, data = api(
'POST',
'photometry',
data={
'obj_id': str(public_source.id),
'instrument_id': ztf_camera.id,
"mjd": [59400, 59401, 59402],
"mag": [19.2, 19.3, np.random.uniform(19, 20)],
"magerr": [0.05, 0.06, np.random.uniform(0.01, 0.1)],
"limiting_mag": [20.0, 20.1, 20.2],
"magsys": ["ab", "ab", "ab"],
"filter": ["ztfr", "ztfg", "ztfr"],
"ra": [42.01, 42.01, 42.02],
"dec": [42.02, 42.01, 42.03],
"origin": [None, "lol", "lol"],
'group_ids': [public_group.id],
},
token=upload_data_token,
)
assert status == 200
assert data['status'] == 'success'
ids = data["data"]["ids"]
assert len(ids) == 3
# POSTing photometry that contains the same first two points should fail:
status, data = api(
'POST',
'photometry',
data={
'obj_id': str(public_source.id),
'instrument_id': ztf_camera.id,
"mjd": [59400, 59401, 59402],
"mag": [19.2, 19.3, np.random.uniform(19, 20)],
"magerr": [0.05, 0.06, np.random.uniform(0.01, 0.1)],
"limiting_mag": [20.0, 20.1, 20.2],
"magsys": ["ab", "ab", "ab"],
"filter": ["ztfr", "ztfg", "ztfr"],
"ra": [42.01, 42.01, 42.02],
"dec": [42.02, 42.01, 42.03],
"origin": [None, "lol", "lol"],
'group_ids': [public_group.id],
},
token=upload_data_token,
)
assert status == 400
assert data['status'] == 'error'
# PUTing photometry that contains
# the same first point, the second point with a different origin, and a new third point should succeed
# only the last two points will be ingested
status, data = api(
'PUT',
'photometry',
data={
'obj_id': str(public_source.id),
'instrument_id': ztf_camera.id,
"mjd": [59400, 59401, 59402],
"mag": [19.2, 19.3, np.random.uniform(19, 20)],
"magerr": [0.05, 0.06, np.random.uniform(0.01, 0.1)],
"limiting_mag": [20.0, 20.1, 20.2],
"magsys": ["ab", "ab", "ab"],
"filter": ["ztfr", "ztfg", "ztfr"],
"ra": [42.01, 42.01, 42.02],
"dec": [42.02, 42.01, 42.03],
"origin": [None, "omg", "lol"],
'group_ids': [public_group.id],
},
token=upload_data_token,
)
assert status == 200
assert data['status'] == 'success'
new_ids = data["data"]["ids"]
assert len(new_ids) == 3
assert len(set(new_ids).intersection(set(ids))) == 1
def test_token_user_post_put_get_photometry_data(
upload_data_token_two_groups, public_source, public_group, public_group2, ztf_camera
):
status, data = api(
'POST',
'photometry',
data={
'obj_id': str(public_source.id),
'instrument_id': ztf_camera.id,
"mjd": [59400, 59401, 59402],
"mag": [19.2, 19.3, np.random.uniform(19, 20)],
"magerr": [0.05, 0.06, np.random.uniform(0.01, 0.1)],
"limiting_mag": [20.0, 20.1, 20.2],
"magsys": ["ab", "ab", "ab"],
"filter": ["ztfr", "ztfg", "ztfr"],
"ra": [42.01, 42.01, 42.02],
"dec": [42.02, 42.01, 42.03],
"origin": [None, "lol", "lol"],
'group_ids': [public_group.id],
},
token=upload_data_token_two_groups,
)
assert status == 200
assert data['status'] == 'success'
ids = data["data"]["ids"]
assert len(ids) == 3
status, data = api(
'GET', f'photometry/{ids[0]}?format=flux', token=upload_data_token_two_groups
)
assert status == 200
assert data['status'] == 'success'
group_ids = [g["id"] for g in data['data']['groups']]
assert len(group_ids) == 2
assert public_group.id in group_ids
# PUTing photometry that contains
# the same first point, the second point with a different origin, and a new third point should succeed
# only the last two points will be ingested
status, data = api(
'PUT',
'photometry',
data={
'obj_id': str(public_source.id),
'instrument_id': ztf_camera.id,
"mjd": [59400, 59401],
"mag": [19.2, 19.3],
"magerr": [0.05, 0.06],
"limiting_mag": [20.0, 20.1],
"magsys": ["ab", "ab"],
"filter": ["ztfr", "ztfg"],
"ra": [42.01, 42.01],
"dec": [42.02, 42.01],
"origin": [None, "lol"],
'group_ids': [public_group.id, public_group2.id],
},
token=upload_data_token_two_groups,
)
assert status == 200
assert data['status'] == 'success'
new_ids = data["data"]["ids"]
assert len(new_ids) == 2
assert len(set(new_ids).intersection(set(ids))) == 2
status, data = api(
'GET', f'photometry/{ids[0]}?format=flux', token=upload_data_token_two_groups
)
assert status == 200
assert data['status'] == 'success'
group_ids = [g["id"] for g in data['data']['groups']]
assert len(group_ids) == 3
token_object = (
DBSession()
.query(Token)
.filter(Token.id == upload_data_token_two_groups)
.first()
)
assert sorted(group_ids) == sorted(
[
public_group.id,
public_group2.id,
token_object.created_by.single_user_group.id,
]
)
def test_post_photometry_multiple_groups(
upload_data_token_two_groups,
public_source_two_groups,
public_group,
public_group2,
ztf_camera,
):
upload_data_token = upload_data_token_two_groups
public_source = public_source_two_groups
status, data = api(
'POST',
'photometry',
data={
'obj_id': str(public_source.id),
'mjd': 58000.0,
'instrument_id': ztf_camera.id,
'flux': 12.24,
'fluxerr': 0.031,
'zp': 25.0,
'magsys': 'ab',
'filter': 'ztfg',
'group_ids': [public_group.id, public_group2.id],
},
token=upload_data_token,
)
assert status == 200
assert data['status'] == 'success'
photometry_id = data['data']['ids'][0]
status, data = api(
'GET', f'photometry/{photometry_id}?format=flux', token=upload_data_token
)
assert status == 200
assert data['status'] == 'success'
assert data['data']['ra'] is None
assert data['data']['dec'] is None
assert data['data']['ra_unc'] is None
assert data['data']['dec_unc'] is None
assert len(data['data']['groups']) == 3
np.testing.assert_allclose(
data['data']['flux'], 12.24 * 10 ** (-0.4 * (25.0 - 23.9))
)
def test_post_photometry_all_groups(
upload_data_token_two_groups,
super_admin_token,
public_source_two_groups,
public_group,
public_group2,
ztf_camera,
):
upload_data_token = upload_data_token_two_groups
public_source = public_source_two_groups
status, data = api(
'POST',
'photometry',
data={
'obj_id': str(public_source.id),
'mjd': 58000.0,
'instrument_id': ztf_camera.id,
'flux': 12.24,
'fluxerr': 0.031,
'zp': 25.0,
'magsys': 'ab',
'filter': 'ztfg',
'group_ids': "all",
},
token=upload_data_token,
)
assert status == 200
assert data['status'] == 'success'
photometry_id = data['data']['ids'][0]
status, data = api(
'GET',
f'photometry/{photometry_id}?format=flux',
token=super_admin_token,
)
assert status == 200
assert data['status'] == 'success'
assert data['data']['ra'] is None
assert data['data']['dec'] is None
assert data['data']['ra_unc'] is None
assert data['data']['dec_unc'] is None
assert len(data['data']['groups']) == 2
assert data['data']['groups'][0]['name'] == cfg['misc']['public_group_name']
np.testing.assert_allclose(
data['data']['flux'], 12.24 * 10 ** (-0.4 * (25.0 - 23.9))
)
def test_retrieve_photometry_group_membership_posted_by_other(
upload_data_token_two_groups,
view_only_token,
public_source_two_groups,
public_group,
public_group2,
ztf_camera,
):
upload_data_token = upload_data_token_two_groups
public_source = public_source_two_groups
status, data = api(
'POST',
'photometry',
data={
'obj_id': str(public_source.id),
'mjd': 58000.0,
'instrument_id': ztf_camera.id,
'flux': 12.24,
'fluxerr': 0.031,
'zp': 25.0,
'magsys': 'ab',
'filter': 'ztfg',
'group_ids': [public_group.id, public_group2.id],
},
token=upload_data_token,
)
assert status == 200
assert data['status'] == 'success'
photometry_id = data['data']['ids'][0]
status, data = api(
'GET', f'photometry/{photometry_id}?format=flux', token=view_only_token
)
assert status == 200
assert data['status'] == 'success'
assert data['data']['ra'] is None
assert data['data']['dec'] is None
assert data['data']['ra_unc'] is None
assert data['data']['dec_unc'] is None
np.testing.assert_allclose(
data['data']['flux'], 12.24 * 10 ** (-0.4 * (25.0 - 23.9))
)
def test_retrieve_photometry_error_group_membership_posted_by_other(
    upload_data_token_two_groups,
    view_only_token,
    public_source_two_groups,
    public_group,
    public_group2,
    ztf_camera,
):
    """Photometry restricted to a group the requester does not belong to
    must not be retrievable by that requester."""
    token = upload_data_token_two_groups
    source = public_source_two_groups
    payload = {
        'obj_id': str(source.id),
        'mjd': 58000.0,
        'instrument_id': ztf_camera.id,
        'flux': 12.24,
        'fluxerr': 0.031,
        'zp': 25.0,
        'magsys': 'ab',
        'filter': 'ztfg',
        'group_ids': [public_group2.id],
    }
    status, data = api('POST', 'photometry', data=payload, token=token)
    assert status == 200
    assert data['status'] == 'success'

    phot_id = data['data']['ids'][0]
    # `view_only_token only` belongs to `public_group`, not `public_group2`
    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=view_only_token
    )
    assert status == 400
    assert data['status'] == 'error'
    assert "Insufficient permissions" in data['message']
def test_can_post_photometry_no_groups(
    upload_data_token, public_source, public_group, ztf_camera
):
    """Omitting `group_ids` entirely still succeeds; the point ends up
    attached to exactly one group."""
    payload = {
        'obj_id': str(public_source.id),
        'mjd': 58000.0,
        'instrument_id': ztf_camera.id,
        'flux': 12.24,
        'fluxerr': 0.031,
        'zp': 25.0,
        'magsys': 'ab',
        'filter': 'ztfg',
    }
    status, data = api('POST', 'photometry', data=payload, token=upload_data_token)
    assert status == 200
    assert data['status'] == 'success'

    phot_id = data['data']['ids'][0]
    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=upload_data_token
    )
    assert status == 200
    assert data['status'] == 'success'
    assert len(data['data']['groups']) == 1
def test_can_post_photometry_empty_groups_list(
    upload_data_token, public_source, public_group, ztf_camera
):
    """An explicitly empty `group_ids` list behaves like omitting it: the
    upload succeeds and the point is attached to exactly one group."""
    payload = {
        'obj_id': str(public_source.id),
        'mjd': 58000.0,
        'instrument_id': ztf_camera.id,
        'flux': 12.24,
        'fluxerr': 0.031,
        'zp': 25.0,
        'magsys': 'ab',
        'filter': 'ztfg',
        'group_ids': [],
    }
    status, data = api('POST', 'photometry', data=payload, token=upload_data_token)
    assert status == 200
    assert data['status'] == 'success'

    phot_id = data['data']['ids'][0]
    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=upload_data_token
    )
    assert status == 200
    assert data['status'] == 'success'
    assert len(data['data']['groups']) == 1
def test_token_user_post_mag_photometry_data_and_convert(
    upload_data_token, public_source, ztf_camera, public_group
):
    """Magnitudes posted in the Vega system are converted correctly when
    served back as AB fluxes and as AB magnitudes."""
    payload = {
        'obj_id': str(public_source.id),
        'mjd': 58000.0,
        'instrument_id': ztf_camera.id,
        'mag': 21.0,
        'magerr': 0.2,
        'limiting_mag': 22.3,
        'magsys': 'vega',
        'filter': 'ztfg',
        'group_ids': [public_group.id],
    }
    status, data = api('POST', 'photometry', data=payload, token=upload_data_token)
    assert status == 200
    assert data['status'] == 'success'
    phot_id = data['data']['ids'][0]

    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=upload_data_token
    )
    assert status == 200
    assert data['status'] == 'success'

    # Vega -> AB zero-point offset for the ztfg band.
    ab = sncosmo.get_magsystem('ab')
    vega = sncosmo.get_magsystem('vega')
    correction = 2.5 * np.log10(vega.zpbandflux('ztfg') / ab.zpbandflux('ztfg'))

    np.testing.assert_allclose(
        data['data']['flux'], 10 ** (-0.4 * (21.0 - correction - 23.9))
    )
    np.testing.assert_allclose(
        data['data']['fluxerr'], 0.2 / (2.5 / np.log(10)) * data['data']['flux']
    )

    # Default (mag) representation: the stored value shifted into AB.
    status, data = api('GET', f'photometry/{phot_id}', token=upload_data_token)
    assert status == 200
    assert data['status'] == 'success'
    np.testing.assert_allclose(data['data']['mag'], 21.0 - correction)
    np.testing.assert_allclose(data['data']['magerr'], 0.2)
def test_token_user_post_and_get_different_systems_mag(
    upload_data_token, public_source, ztf_camera, public_group
):
    """Magnitudes requested in different systems reflect the Vega<->AB
    zero-point offset; values round-trip unchanged in the posted system."""
    payload = {
        'obj_id': str(public_source.id),
        'mjd': 58000.0,
        'instrument_id': ztf_camera.id,
        'mag': 21.0,
        'magerr': 0.2,
        'limiting_mag': 22.3,
        'magsys': 'vega',
        'filter': 'ztfg',
        'group_ids': [public_group.id],
    }
    status, data = api('POST', 'photometry', data=payload, token=upload_data_token)
    assert status == 200
    assert data['status'] == 'success'
    phot_id = data['data']['ids'][0]

    # In the posted (vega) system everything comes back as uploaded.
    status, data = api(
        'GET',
        f'photometry/{phot_id}?format=mag&magsys=vega',
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    assert data['data']['magsys'] == 'vega'
    np.testing.assert_allclose(data['data']['mag'], 21.0)
    np.testing.assert_allclose(data['data']['magerr'], 0.2)
    np.testing.assert_allclose(data['data']['limiting_mag'], 22.3)

    # In AB, magnitudes and the limit shift by the band correction;
    # the uncertainty is unchanged.
    ab = sncosmo.get_magsystem('ab')
    vega = sncosmo.get_magsystem('vega')
    correction = 2.5 * np.log10(vega.zpbandflux('ztfg') / ab.zpbandflux('ztfg'))
    status, data = api(
        'GET',
        f'photometry/{phot_id}?format=mag&magsys=ab',
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    np.testing.assert_allclose(data['data']['mag'], 21.0 - correction)
    np.testing.assert_allclose(data['data']['magerr'], 0.2)
    np.testing.assert_allclose(data['data']['limiting_mag'], 22.3 - correction)
def test_token_user_post_and_get_different_systems_flux(
    upload_data_token, public_source, ztf_camera, public_group
):
    """Fluxes are identical across requested magnitude systems; only the
    reported zeropoint shifts by the Vega<->AB correction."""
    payload = {
        'obj_id': str(public_source.id),
        'mjd': 58000.0,
        'instrument_id': ztf_camera.id,
        'mag': 21.0,
        'magerr': 0.2,
        'limiting_mag': 22.3,
        'magsys': 'vega',
        'filter': 'ztfg',
        'group_ids': [public_group.id],
    }
    status, data = api('POST', 'photometry', data=payload, token=upload_data_token)
    assert status == 200
    assert data['status'] == 'success'
    phot_id = data['data']['ids'][0]

    ab = sncosmo.get_magsystem('ab')
    vega = sncosmo.get_magsystem('vega')
    correction = 2.5 * np.log10(vega.zpbandflux('ztfg') / ab.zpbandflux('ztfg'))
    expected_flux = 10 ** (-0.4 * (21 - correction - 23.9))

    status, data = api(
        'GET',
        f'photometry/{phot_id}?format=flux&magsys=vega',
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    np.testing.assert_allclose(data['data']['flux'], expected_flux)
    np.testing.assert_allclose(
        data['data']['fluxerr'], 0.2 / (2.5 / np.log(10)) * data['data']['flux']
    )
    # Vega zeropoint is offset relative to the AB one.
    np.testing.assert_allclose(data['data']['zp'], 23.9 + correction)

    status, data = api(
        'GET',
        f'photometry/{phot_id}?format=flux&magsys=ab',
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    np.testing.assert_allclose(data['data']['flux'], expected_flux)
    np.testing.assert_allclose(
        data['data']['fluxerr'], 0.2 / (2.5 / np.log(10)) * data['data']['flux']
    )
    np.testing.assert_allclose(data['data']['zp'], 23.9)
def test_token_user_mixed_photometry_post(
    upload_data_token, public_source, ztf_camera, public_group
):
    """Scalar fields broadcast against list fields on upload; mismatched
    list lengths are rejected."""
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': str(public_source.id),
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'mag': 21.0,
            'magerr': [0.2, 0.1],
            'limiting_mag': 22.3,
            'magsys': 'ab',
            'filter': 'ztfg',
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'

    # Inspect the second broadcast point (magerr == 0.1).
    phot_id = data['data']['ids'][1]
    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=upload_data_token
    )
    assert status == 200
    assert data['status'] == 'success'
    np.testing.assert_allclose(data['data']['flux'], 10 ** (-0.4 * (21.0 - 23.9)))
    np.testing.assert_allclose(
        data['data']['fluxerr'], 0.1 / (2.5 / np.log(10)) * data['data']['flux']
    )

    # should fail as len(mag) != len(magerr)
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': str(public_source.id),
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'mag': [21.0],
            'magerr': [0.2, 0.1],
            'limiting_mag': 22.3,
            'magsys': 'ab',
            'filter': 'ztfg',
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 400
    assert data['status'] == 'error'
def test_token_user_mixed_mag_none_photometry_post(
    upload_data_token, public_source, ztf_camera, public_group
):
    """Uploads mixing None with finite magnitudes (or with mismatched
    None placement in `magerr`) must be rejected with a 400 error.

    The fixture list matches the sibling tests (e.g.
    test_token_user_mixed_photometry_post), which is what the body uses.
    """
    # A bare None `mag` paired with a list `magerr` is invalid.
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': str(public_source.id),
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'mag': None,
            'magerr': [0.2, 0.1],
            'limiting_mag': 22.3,
            'magsys': 'ab',
            'filter': 'ztfg',
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 400
    assert data['status'] == 'error'

    # None inside `mag` while `magerr` has a finite value at that index.
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': str(public_source.id),
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'mag': [21.3, None],
            'magerr': [0.2, 0.1],
            'limiting_mag': 22.3,
            'magsys': 'ab',
            'filter': 'ztfg',
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 400
    assert data['status'] == 'error'

    # None placement disagreeing between `mag` and `magerr`.
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': str(public_source.id),
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'mag': [21.3, None],
            'magerr': [None, 0.1],
            'limiting_mag': 22.3,
            'magsys': 'ab',
            'filter': 'ztfg',
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 400
    assert data['status'] == 'error'
def test_token_user_post_photometry_limits(
    upload_data_token, public_source, ztf_camera, public_group
):
    """Non-detections (null mag or null flux) are stored as upper limits:
    flux comes back null with fluxerr encoding the limit."""
    # Mag-space non-detection: limiting_mag determines fluxerr.
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': str(public_source.id),
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'mag': None,
            'magerr': None,
            'limiting_mag': 22.3,
            'magsys': 'ab',
            'filter': 'ztfg',
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    phot_id = data['data']['ids'][0]

    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=upload_data_token
    )
    assert status == 200
    assert data['status'] == 'success'
    assert data['data']['flux'] is None
    np.testing.assert_allclose(
        data['data']['fluxerr'], 10 ** (-0.4 * (22.3 - 23.9)) / PHOT_DETECTION_THRESHOLD
    )

    # Flux-space non-detection: fluxerr is rescaled by the zeropoint.
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': str(public_source.id),
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'flux': None,
            'fluxerr': 0.031,
            'zp': 25.0,
            'magsys': 'ab',
            'filter': 'ztfg',
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    phot_id = data['data']['ids'][0]

    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=upload_data_token
    )
    assert status == 200
    assert data['status'] == 'success'
    assert data['data']['flux'] is None
    np.testing.assert_allclose(
        data['data']['fluxerr'], 0.031 * 10 ** (-0.4 * (25.0 - 23.9))
    )
def test_token_user_post_invalid_filter(
    upload_data_token, public_source, ztf_camera, public_group
):
    """A bandpass the instrument does not support ('bessellv') is rejected."""
    payload = {
        'obj_id': str(public_source.id),
        'mjd': 58000.0,
        'instrument_id': ztf_camera.id,
        'mag': None,
        'magerr': None,
        'limiting_mag': 22.3,
        'magsys': 'ab',
        'filter': 'bessellv',
        'group_ids': [public_group.id],
    }
    status, data = api('POST', 'photometry', data=payload, token=upload_data_token)
    assert status == 400
    assert data['status'] == 'error'
def test_token_user_post_photometry_data_series(
    upload_data_token, public_source, ztf_camera, public_group
):
    """A single request may carry parallel arrays (one point per element);
    a list of per-point dicts is not a valid request body."""
    # valid request
    series = {
        'obj_id': str(public_source.id),
        'mjd': [58000.0, 58001.0, 58002.0],
        'instrument_id': ztf_camera.id,
        'flux': [12.24, 15.24, 12.24],
        'fluxerr': [0.031, 0.029, 0.030],
        'filter': ['ztfg', 'ztfg', 'ztfg'],
        'zp': [25.0, 30.0, 21.2],
        'magsys': ['ab', 'ab', 'ab'],
        'ra': 264.1947917,
        'dec': [50.5478333, 50.5478333 + 0.00001, 50.5478333],
        'dec_unc': 0.2,
        'group_ids': [public_group.id],
    }
    status, data = api('POST', 'photometry', data=series, token=upload_data_token)
    assert status == 200
    assert data['status'] == 'success'
    assert len(data['data']['ids']) == 3

    # Spot-check the middle point of the series.
    phot_id = data['data']['ids'][1]
    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=upload_data_token
    )
    assert status == 200
    assert data['status'] == 'success'
    assert np.allclose(data['data']['flux'], 15.24 * 10 ** (-0.4 * (30 - 23.9)))
    assert np.allclose(data['data']['dec'], 50.5478333 + 0.00001)
    assert np.allclose(data['data']['dec_unc'], 0.2)
    assert data['data']['ra_unc'] is None

    # invalid request
    status, data = api(
        'POST',
        'photometry',
        data=[
            {
                'obj_id': str(public_source.id),
                'mjd': 58000,
                'instrument_id': ztf_camera.id,
                'flux': 12.24,
                'fluxerr': 0.031,
                'filter': 'ztfg',
                'zp': 25.0,
                'magsys': 'ab',
                'group_ids': [public_group.id],
            },
            {
                'obj_id': str(public_source.id),
                'mjd': 58001,
                'instrument_id': ztf_camera.id,
                'flux': 15.24,
                'fluxerr': 0.031,
                'filter': 'ztfg',
                'zp': 30.0,
                'magsys': 'ab',
                'group_ids': [public_group.id],
            },
            {
                'obj_id': str(public_source.id),
                'mjd': 58002,
                'instrument_id': ztf_camera.id,
                'flux': 12.24,
                'fluxerr': 0.031,
                'filter': 'ztfg',
                'zp': 21.2,
                'magsys': 'vega',
                'group_ids': [public_group.id],
            },
        ],
        token=upload_data_token,
    )
    assert status == 400
    assert data['status'] == 'error'
def test_post_photometry_no_access_token(
    view_only_token, public_source, ztf_camera, public_group
):
    """A read-only token may not upload photometry."""
    payload = {
        'obj_id': str(public_source.id),
        'mjd': 58000.0,
        'instrument_id': ztf_camera.id,
        'flux': 12.24,
        'fluxerr': 0.031,
        'zp': 25.0,
        'magsys': 'ab',
        'filter': 'ztfg',
        'group_ids': [public_group.id],
    }
    status, data = api('POST', 'photometry', data=payload, token=view_only_token)
    assert status == 400
    assert data['status'] == 'error'
def test_token_user_update_photometry(
    upload_data_token, public_source, ztf_camera, public_group
):
    """The uploader can PATCH a point; the updated flux is then served."""
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': str(public_source.id),
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'flux': 12.24,
            'fluxerr': 0.031,
            'zp': 25.0,
            'magsys': 'ab',
            'filter': 'ztfi',
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    phot_id = data['data']['ids'][0]

    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=upload_data_token
    )
    assert status == 200
    assert data['status'] == 'success'
    np.testing.assert_allclose(data['data']['flux'], 12.24 * 10 ** (-0.4 * (25 - 23.9)))

    # Change the flux from 12.24 to 11.0 in place.
    status, data = api(
        'PATCH',
        f'photometry/{phot_id}',
        data={
            'obj_id': str(public_source.id),
            'flux': 11.0,
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'fluxerr': 0.031,
            'zp': 25.0,
            'magsys': 'ab',
            'filter': 'ztfi',
        },
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'

    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=upload_data_token
    )
    np.testing.assert_allclose(data['data']['flux'], 11.0 * 10 ** (-0.4 * (25 - 23.9)))
def test_token_user_cannot_update_unowned_photometry(
    upload_data_token, manage_sources_token, public_source, ztf_camera, public_group
):
    """A different user (even with manage-sources rights) cannot PATCH a
    point they did not upload."""
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': str(public_source.id),
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'flux': 12.24,
            'fluxerr': 0.031,
            'zp': 25.0,
            'magsys': 'ab',
            'filter': 'ztfi',
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    phot_id = data['data']['ids'][0]

    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=upload_data_token
    )
    assert status == 200
    assert data['status'] == 'success'
    np.testing.assert_allclose(data['data']['flux'], 12.24 * 10 ** (-0.4 * (25 - 23.9)))

    # Attempt the update with a token that does not own the point.
    status, data = api(
        'PATCH',
        f'photometry/{phot_id}',
        data={
            'obj_id': str(public_source.id),
            'flux': 11.0,
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'fluxerr': 0.031,
            'zp': 25.0,
            'magsys': 'ab',
            'filter': 'ztfi',
        },
        token=manage_sources_token,
    )
    assert status == 400
def test_token_user_update_photometry_groups(
    upload_data_token_two_groups,
    manage_sources_token_two_groups,
    public_source_two_groups,
    ztf_camera,
    public_group,
    public_group2,
    view_only_token,
):
    """Dropping a group from a point via PATCH revokes that group's
    read access."""
    token = upload_data_token_two_groups
    source = public_source_two_groups
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': str(source.id),
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'flux': 12.24,
            'fluxerr': 0.031,
            'zp': 25.0,
            'magsys': 'ab',
            'filter': 'ztfi',
            'group_ids': [public_group.id, public_group2.id],
        },
        token=token,
    )
    assert status == 200
    assert data['status'] == 'success'
    phot_id = data['data']['ids'][0]

    # While both groups are attached a public_group member can read it.
    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=view_only_token
    )
    assert status == 200
    assert data['status'] == 'success'

    # Restrict the point to public_group2 only.
    status, data = api(
        'PATCH',
        f'photometry/{phot_id}',
        data={
            'obj_id': str(source.id),
            'flux': 11.0,
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'fluxerr': 0.031,
            'zp': 25.0,
            'magsys': 'ab',
            'filter': 'ztfi',
            'group_ids': [public_group2.id],
        },
        token=token,
    )
    assert status == 200
    assert data['status'] == 'success'

    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=view_only_token
    )
    assert status == 400
    assert data['status'] == 'error'
    assert "Insufficient permissions" in data["message"]
def test_user_can_delete_owned_photometry_data(
    upload_data_token, public_source, ztf_camera, public_group
):
    """The uploader can delete their own photometry; it is gone afterwards."""
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': str(public_source.id),
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'flux': 12.24,
            'fluxerr': 0.031,
            'zp': 25.0,
            'magsys': 'ab',
            'filter': 'ztfi',
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    phot_id = data['data']['ids'][0]

    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=upload_data_token
    )
    assert status == 200
    assert data['status'] == 'success'
    np.testing.assert_allclose(data['data']['flux'], 12.24 * 10 ** (-0.4 * (25 - 23.9)))

    status, data = api('DELETE', f'photometry/{phot_id}', token=upload_data_token)
    assert status == 200

    # The deleted point is no longer retrievable.
    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=upload_data_token
    )
    assert status == 400
def test_user_cannot_delete_unowned_photometry_data(
    upload_data_token, manage_sources_token, public_source, ztf_camera, public_group
):
    """A non-owner (even with manage-sources rights) cannot delete a point."""
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': str(public_source.id),
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'flux': 12.24,
            'fluxerr': 0.031,
            'zp': 25.0,
            'magsys': 'ab',
            'filter': 'ztfi',
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    phot_id = data['data']['ids'][0]

    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=upload_data_token
    )
    assert status == 200
    assert data['status'] == 'success'
    np.testing.assert_allclose(data['data']['flux'], 12.24 * 10 ** (-0.4 * (25 - 23.9)))

    status, data = api(
        'DELETE', f'photometry/{phot_id}', token=manage_sources_token
    )
    assert status == 400
def test_admin_can_delete_unowned_photometry_data(
    upload_data_token, super_admin_token, public_source, ztf_camera, public_group
):
    """A super-admin can delete photometry uploaded by someone else."""
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': str(public_source.id),
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'flux': 12.24,
            'fluxerr': 0.031,
            'zp': 25.0,
            'magsys': 'ab',
            'filter': 'ztfi',
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    phot_id = data['data']['ids'][0]

    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=upload_data_token
    )
    assert status == 200
    assert data['status'] == 'success'
    np.testing.assert_allclose(data['data']['flux'], 12.24 * 10 ** (-0.4 * (25 - 23.9)))

    status, data = api('DELETE', f'photometry/{phot_id}', token=super_admin_token)
    assert status == 200

    # The deleted point is no longer retrievable, even by the uploader.
    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=upload_data_token
    )
    assert status == 400
def test_token_user_retrieving_source_photometry_and_convert(
    view_only_token, public_source
):
    """Fetch a source's photometry in flux and mag formats, in both AB and
    Vega systems, and check the representations are mutually consistent."""
    # Fluxes in AB; magnitudes for the first/last epochs are derived by
    # hand from (flux, zp) for comparison below.
    status, data = api(
        'GET',
        f'sources/{public_source.id}/photometry?format=flux&magsys=ab',
        token=view_only_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    assert isinstance(data['data'], list)
    assert 'mjd' in data['data'][0]
    assert 'ra_unc' in data['data'][0]
    # Sort chronologically so "first"/"last" epochs are well defined.
    data['data'] = sorted(data['data'], key=lambda d: d['mjd'])
    mag1_ab = -2.5 * np.log10(data['data'][0]['flux']) + data['data'][0]['zp']
    magerr1_ab = 2.5 / np.log(10) * data['data'][0]['fluxerr'] / data['data'][0]['flux']
    maglast_ab = -2.5 * np.log10(data['data'][-1]['flux']) + data['data'][-1]['zp']
    magerrlast_ab = (
        2.5 / np.log(10) * data['data'][-1]['fluxerr'] / data['data'][-1]['flux']
    )
    # The server's mag-format values must equal the hand-derived ones.
    status, data = api(
        'GET',
        f'sources/{public_source.id}/photometry?format=mag&magsys=ab',
        token=view_only_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    data['data'] = sorted(data['data'], key=lambda d: d['mjd'])
    assert np.allclose(mag1_ab, data['data'][0]['mag'])
    assert np.allclose(magerr1_ab, data['data'][0]['magerr'])
    assert np.allclose(maglast_ab, data['data'][-1]['mag'])
    assert np.allclose(magerrlast_ab, data['data'][-1]['magerr'])
    # Repeat the flux fetch in the Vega system and derive Vega magnitudes.
    status, data = api(
        'GET',
        f'sources/{public_source.id}/photometry?format=flux&magsys=vega',
        token=view_only_token,
    )
    data['data'] = sorted(data['data'], key=lambda d: d['mjd'])
    mag1_vega = -2.5 * np.log10(data['data'][0]['flux']) + data['data'][0]['zp']
    magerr1_vega = (
        2.5 / np.log(10) * data['data'][0]['fluxerr'] / data['data'][0]['flux']
    )
    maglast_vega = -2.5 * np.log10(data['data'][-1]['flux']) + data['data'][-1]['zp']
    magerrlast_vega = (
        2.5 / np.log(10) * data['data'][-1]['fluxerr'] / data['data'][-1]['flux']
    )
    assert status == 200
    assert data['status'] == 'success'
    # Per-band Vega->AB offsets; AB and Vega magnitudes must differ by
    # exactly this offset, while uncertainties are system-independent.
    ab = sncosmo.get_magsystem('ab')
    vega = sncosmo.get_magsystem('vega')
    vega_to_ab = {
        filter: 2.5 * np.log10(ab.zpbandflux(filter) / vega.zpbandflux(filter))
        for filter in ['ztfg', 'ztfr', 'ztfi']
    }
    assert np.allclose(mag1_ab, mag1_vega + vega_to_ab[data['data'][0]['filter']])
    assert np.allclose(magerr1_ab, magerr1_vega)
    assert np.allclose(
        maglast_ab, maglast_vega + vega_to_ab[data['data'][-1]['filter']]
    )
    assert np.allclose(magerrlast_ab, magerrlast_vega)
def test_token_user_retrieve_null_photometry(
    upload_data_token, public_source, ztf_camera, public_group
):
    """A non-detection comes back with null flux/mag; fluxerr encodes the
    limiting magnitude via the detection threshold."""
    payload = {
        'obj_id': str(public_source.id),
        'mjd': 58000.0,
        'instrument_id': ztf_camera.id,
        'mag': None,
        'magerr': None,
        'limiting_mag': 22.3,
        'magsys': 'ab',
        'filter': 'ztfg',
        'group_ids': [public_group.id],
    }
    status, data = api('POST', 'photometry', data=payload, token=upload_data_token)
    assert status == 200
    assert data['status'] == 'success'
    phot_id = data['data']['ids'][0]

    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=upload_data_token
    )
    assert status == 200
    assert data['status'] == 'success'
    assert data['data']['flux'] is None
    np.testing.assert_allclose(
        data['data']['fluxerr'], 10 ** (-0.4 * (22.3 - 23.9)) / PHOT_DETECTION_THRESHOLD
    )

    status, data = api(
        'GET', f'photometry/{phot_id}?format=mag', token=upload_data_token
    )
    assert status == 200
    assert data['status'] == 'success'
    assert data['data']['mag'] is None
    assert data['data']['magerr'] is None
def test_token_user_big_post(
    upload_data_token, public_source, ztf_camera, public_group
):
    """A bulk upload of 50k points in a single request succeeds."""
    n_points = 50000
    payload = {
        'obj_id': str(public_source.id),
        'mjd': [58000 + i for i in range(n_points)],
        'instrument_id': ztf_camera.id,
        'mag': np.random.uniform(low=18, high=22, size=n_points).tolist(),
        'magerr': np.random.uniform(low=0.1, high=0.3, size=n_points).tolist(),
        'limiting_mag': 22.3,
        'magsys': 'ab',
        'filter': 'ztfg',
        'group_ids': [public_group.id],
    }
    status, data = api('POST', 'photometry', data=payload, token=upload_data_token)
    assert status == 200
    assert data['status'] == 'success'
def test_token_user_get_range_photometry(
    upload_data_token, public_source, public_group, ztf_camera
):
    """`photometry/range` filters points by instrument and max date."""
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': str(public_source.id),
            'mjd': [58000.0, 58500.0, 59000.0],
            'instrument_id': ztf_camera.id,
            'flux': 12.24,
            'fluxerr': 0.031,
            'zp': 25.0,
            'magsys': 'ab',
            'filter': 'ztfg',
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'

    # Only the first epoch precedes 2018-05-15.
    status, data = api(
        'GET',
        'photometry/range',
        token=upload_data_token,
        data={'instrument_ids': [ztf_camera.id], 'max_date': '2018-05-15T00:00:00'},
    )
    assert status == 200
    assert data['status'] == 'success'
    assert len(data['data']) == 1

    # The first two epochs precede 2019-02-01.
    status, data = api(
        'GET',
        'photometry/range?format=flux&magsys=vega',
        token=upload_data_token,
        data={'instrument_ids': [ztf_camera.id], 'max_date': '2019-02-01T00:00:00'},
    )
    assert status == 200
    assert data['status'] == 'success'
    assert len(data['data']) == 2
def test_reject_photometry_inf(
    upload_data_token, public_source, public_group, ztf_camera
):
    """Every upload containing a non-finite value (inf / -inf) in any
    photometric field must be rejected with a 400 error."""
    bad_payloads = [
        # Infinite flux and fluxerr.
        {
            'obj_id': str(public_source.id),
            'mjd': [58000.0, 58500.0, 59000.0],
            'instrument_id': ztf_camera.id,
            'flux': math.inf,
            'fluxerr': math.inf,
            'zp': 25.0,
            'magsys': 'ab',
            'filter': 'ztfg',
            'group_ids': [public_group.id],
        },
        # Infinite mag and magerr.
        {
            'obj_id': str(public_source.id),
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'mag': math.inf,
            'magerr': math.inf,
            'limiting_mag': 22.3,
            'magsys': 'vega',
            'filter': 'ztfg',
            'group_ids': [public_group.id],
        },
        # Infinite limiting magnitude on a detection.
        {
            'obj_id': str(public_source.id),
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'mag': 2.0,
            'magerr': 23.0,
            'limiting_mag': math.inf,
            'magsys': 'vega',
            'filter': 'ztfg',
            'group_ids': [public_group.id],
        },
        # Negative-infinite limiting magnitude on a non-detection.
        {
            'obj_id': str(public_source.id),
            'mjd': 58000.0,
            'instrument_id': ztf_camera.id,
            'mag': None,
            'magerr': None,
            'limiting_mag': -math.inf,
            'magsys': 'vega',
            'filter': 'ztfg',
            'group_ids': [public_group.id],
        },
        # Null flux with infinite fluxerr.
        {
            'obj_id': str(public_source.id),
            'mjd': [58000.0, 58500.0, 59000.0],
            'instrument_id': ztf_camera.id,
            'flux': None,
            'fluxerr': math.inf,
            'zp': 25.0,
            'magsys': 'ab',
            'filter': 'ztfg',
            'group_ids': [public_group.id],
        },
    ]
    for payload in bad_payloads:
        status, data = api(
            'POST', 'photometry', data=payload, token=upload_data_token
        )
        assert status == 400
        assert data['status'] == 'error'
def test_token_user_post_to_foreign_group_and_retrieve(
    upload_data_token, public_source_two_groups, public_group2, ztf_camera
):
    """A user may attach photometry to another of the source's groups and
    still read the point back afterwards."""
    payload = {
        'obj_id': str(public_source_two_groups.id),
        'mjd': [58000.0, 58500.0, 59000.0],
        'instrument_id': ztf_camera.id,
        'flux': 12.24,
        'fluxerr': 0.031,
        'zp': 25.0,
        'magsys': 'ab',
        'filter': 'ztfg',
        'group_ids': [public_group2.id],
    }
    status, data = api('POST', 'photometry', data=payload, token=upload_data_token)
    assert status == 200
    assert data['status'] == 'success'

    phot_id = data['data']['ids'][0]
    status, data = api(
        'GET', f'photometry/{phot_id}?format=flux', token=upload_data_token
    )
    assert status == 200
def test_problematic_photometry_1263(
    upload_data_token, public_source, public_group, ztf_camera, public_group2
):
    """Regression test for issue #1263 (see test name).

    Posts a 41-epoch flux-space light curve shared with two groups, then
    re-posts 23 overlapping epochs as magnitudes (with astrometry) using
    group_ids="all", then re-submits that same payload via PUT. Every point
    returned by the PUT must end up associated with exactly two groups.
    """
    # First upload: flux-space series shared with both groups.
    # None flux entries are non-detections; note several mjd values repeat.
    payload = {
        "obj_id": public_source.id,
        "group_ids": [public_group.id, public_group2.id],
        "magsys": "ab",
        "zp": 23.9,
        "instrument_id": ztf_camera.id,
        'mjd': [
            59145.46447,
            59149.50347,
            59149.50347,
            59150.50872,
            59150.50872,
            59152.51631,
            59155.50801,
            59152.51631,
            59155.50801,
            59156.48479,
            59156.48479,
            59126.48693,
            59128.46834,
            59130.50257,
            59135.47329,
            59137.4758,
            59139.45454,
            59141.47449,
            59143.50987,
            59143.50987,
            59145.46447,
            59145.50556,
            59150.52806,
            59150.52806,
            59151.52116,
            59151.52116,
            59152.48332,
            59152.48332,
            59155.50022,
            59155.50022,
            59156.5383,
            59126.53144,
            59128.51928,
            59130.53196,
            59135.51196,
            59137.51334,
            59139.51507,
            59141.51422,
            59143.48529,
            59143.48529,
            59145.50556,
        ],
        'filter': [
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfg',
            'ztfr',
            'ztfr',
            'ztfr',
            'ztfr',
            'ztfr',
            'ztfr',
            'ztfr',
            'ztfr',
            'ztfr',
            'ztfr',
            'ztfr',
            'ztfr',
            'ztfr',
            'ztfr',
            'ztfr',
            'ztfr',
            'ztfr',
            'ztfr',
            'ztfr',
            'ztfr',
        ],
        'flux': [
            105.4095462,
            100.4989583,
            100.4986052,
            97.45052422,
            97.45411937,
            91.71425204,
            81.08011148,
            91.71489652,
            81.08110854,
            59.37327478,
            59.37452643,
            None,
            None,
            None,
            73.17457336,
            82.20150344,
            89.14970986,
            102.1692537,
            98.6103674,
            98.60984771,
            105.4086204,
            100.8602976,
            94.84847105,
            94.85063718,
            104.8945366,
            104.8961951,
            101.6093671,
            101.6061542,
            82.34545782,
            82.34560248,
            72.48165796,
            None,
            None,
            None,
            61.60270207,
            72.73101786,
            83.83015488,
            98.70066264,
            99.85275375,
            99.84977174,
            100.8608292,
        ],
        'fluxerr': [
            8.416851743,
            10.10817406,
            10.10811785,
            11.74314252,
            11.74356103,
            11.40505647,
            10.61680918,
            11.40514417,
            10.61696199,
            10.6736128,
            10.67382477,
            13.51668635,
            18.71327665,
            9.509339593,
            9.374956127,
            9.638764985,
            11.98599464,
            10.42671307,
            9.666542673,
            9.666476165,
            8.41682049,
            8.680180822,
            9.926401394,
            9.926617677,
            8.494021784,
            8.494115051,
            9.984017125,
            9.983686084,
            7.964270439,
            7.964306468,
            8.499519049,
            12.65289244,
            11.39803573,
            9.771246706,
            7.839855173,
            7.592658663,
            8.674127848,
            8.965488502,
            7.69135795,
            7.691126885,
            8.680212034,
        ],
    }
    status, data = api(
        'POST',
        'photometry',
        data=payload,
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    # Second upload: 23 of the same epochs in magnitude space with
    # per-point astrometry; "all" expands to every accessible group.
    payload = {
        "obj_id": public_source.id,
        "group_ids": "all",
        "magsys": "ab",
        "instrument_id": ztf_camera.id,
        "filter": [
            "ztfr",
            "ztfg",
            "ztfr",
            "ztfg",
            "ztfr",
            "ztfg",
            "ztfr",
            "ztfg",
            "ztfr",
            "ztfr",
            "ztfg",
            "ztfg",
            "ztfr",
            "ztfg",
            "ztfg",
            "ztfr",
            "ztfr",
            "ztfr",
            "ztfg",
            "ztfr",
            "ztfg",
            "ztfg",
            "ztfr",
        ],
        "mjd": [
            59130.53195599979,
            59135.473286999855,
            59135.51195599977,
            59137.47579859989,
            59137.51334490022,
            59139.45453700004,
            59139.51506939996,
            59141.474490699824,
            59141.51422449993,
            59143.48528939998,
            59143.50987270009,
            59145.46446759999,
            59145.50555559993,
            59149.50347220013,
            59150.50871529989,
            59150.52805559989,
            59151.52115740022,
            59152.4833217999,
            59152.516307900194,
            59155.50021990016,
            59155.5080093001,
            59156.4847916998,
            59156.53829859989,
        ],
        "limiting_mag": [
            19.67770004272461,
            20.11709976196289,
            20.059200286865234,
            20.281099319458008,
            20.224000930786133,
            19.809099197387695,
            20.236799240112305,
            20.57659912109375,
            20.31290054321289,
            20.414499282836914,
            20.680700302124023,
            20.57069969177246,
            20.48349952697754,
            20.242000579833984,
            20.642900466918945,
            20.029699325561523,
            20.11090087890625,
            19.808948516845703,
            19.819171905517578,
            19.9112606048584,
            19.913991928100586,
            19.600677490234375,
            20.005773544311523,
        ],
        "mag": [
            None,
            19.239099502563477,
            19.426000595092773,
            19.11280059814453,
            19.24570083618164,
            19.024700164794922,
            19.09149932861328,
            18.876699447631836,
            18.914199829101562,
            18.901599884033203,
            18.915199279785156,
            18.84280014038086,
            18.89069938659668,
            18.89459991455078,
            18.92799949645996,
            18.957399368286133,
            18.848100662231445,
            18.882665634155273,
            18.993907928466797,
            19.110898971557617,
            19.127714157104492,
            19.466022491455078,
            19.24942970275879,
        ],
        "magerr": [
            None,
            0.1391019970178604,
            0.13817599415779114,
            0.12731100618839264,
            0.11334399878978729,
            0.1459749937057495,
            0.11234399676322937,
            0.11080300062894821,
            0.09862300008535385,
            0.0836310014128685,
            0.1064319983124733,
            0.08669500052928925,
            0.09344000369310379,
            0.10920300334692001,
            0.13083499670028687,
            0.11362800002098083,
            0.08791899681091309,
            0.1066831648349762,
            0.13501590490341187,
            0.10501029342412949,
            0.14216870069503784,
            0.19518424570560455,
            0.12731821835041046,
        ],
        "ra": [
            None,
            134.5934039,
            134.5934169,
            134.5933773,
            134.593404,
            134.593372,
            134.5933825,
            134.5933984,
            134.5933945,
            134.5933917,
            134.5933988,
            134.5933848,
            134.5933991,
            134.5933909,
            134.5934048,
            134.5934296,
            134.5934341,
            134.593388,
            134.5933606,
            134.5933857,
            134.5933939,
            134.5933847,
            134.5933954,
        ],
        "dec": [
            None,
            15.0412865,
            15.041256,
            15.0412686,
            15.0412482,
            15.0412709,
            15.0412572,
            15.0412656,
            15.0412765,
            15.0412744,
            15.0412673,
            15.041271,
            15.0412726,
            15.0413061,
            15.0412751,
            15.041267,
            15.0412856,
            15.0412655,
            15.0412913,
            15.0412952,
            15.0412737,
            15.0411913,
            15.0412605,
        ],
    }
    status, data = api(
        'POST',
        'photometry',
        data=payload,
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    # Re-submit the identical payload via PUT (upsert). The assignment
    # below is redundant — group_ids is already "all" — but kept as-is.
    payload['group_ids'] = 'all'
    status, data = api(
        'PUT',
        'photometry',
        data=payload,
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    # Every upserted point must be readable and carry exactly two groups.
    for id in data['data']['ids']:
        status, data = api(
            'GET', f'photometry/{id}?format=flux', token=upload_data_token
        )
        assert status == 200
        assert data['status'] == 'success'
        assert len(data['data']['groups']) == 2
def test_problematic_photometry_1276(
public_source, public_group, super_admin_token, ztf_camera
):
payload = {
"obj_id": public_source.id,
"group_ids": [public_group.id],
"magsys": "ab",
"instrument_id": ztf_camera.id,
"filter": [
"ztfg",
"ztfr",
"ztfr",
"ztfr",
"ztfr",
"ztfr",
"ztfr",
"ztfr",
"ztfg",
"ztfr",
"ztfr",
"ztfg",
"ztfg",
"ztfr",
"ztfg",
"ztfr",
"ztfr",
"ztfr",
"ztfg",
"ztfg",
"ztfg",
"ztfg",
"ztfr",
"ztfr",
"ztfg",
"ztfg",
"ztfr",
"ztfg",
"ztfr",
],
"mjd": [
59123.41299769981,
59129.472291700076,
59134.451203700155,
59136.46903940011,
59136.46903940011,
59139.295057899784,
59139.295057899784,
59139.295057899784,
59139.389629600104,
59141.36341439979,
59141.36341439979,
59141.414189800154,
59141.414189800154,
59143.318460599985,
59143.39145829994,
59145.34545140015,
59145.34545140015,
59145.34545140015,
59145.41583329998,
59145.41583329998,
59149.4703819002,
59151.32671299996,
59151.33918979997,
59153.33692129981,
59153.404351899866,
59155.220972199924,
59155.290161999874,
59157.360347200185,
59157.433634299785,
],
"limiting_mag": [
19.396099090576172,
20.23240089416504,
20.129100799560547,
20.493600845336914,
20.493600845336914,
20.422000885009766,
20.422000885009766,
20.422000885009766,
20.272199630737305,
20.18910026550293,
20.18910026550293,
20.846799850463867,
20.846799850463867,
20.624300003051758,
20.854000091552734,
20.628799438476562,
20.628799438476562,
20.628799438476562,
20.840900421142578,
20.840900421142578,
20.32859992980957,
19.60849952697754,
19.705799102783203,
19.47800064086914,
19.409400939941406,
19.462600708007812,
19.77630043029785,
19.678672790527344,
19.754121780395508,
],
"mag": [
18.43560028076172,
17.338199615478516,
16.25189971923828,
16.011999130249023,
16.09589958190918,
15.974100112915039,
15.891500473022461,
15.891500473022461,
None,
15.753999710083008,
15.819600105285645,
18.528499603271484,
18.57939910888672,
15.781000137329102,
18.309499740600586,
15.692399978637695,
15.692399978637695,
15.790599822998047,
18.305700302124023,
18.31529998779297,
18.13994026184082,
18.040000915527344,
15.505499839782715,
15.569299697875977,
17.812599182128906,
18.046100616455078,
None,
17.95865249633789,
15.475956916809082,
],
"magerr": [
0.18098600208759308,
0.12704600393772125,
0.03412500023841858,
0.018530000001192093,
0.09321600198745728,
0.1358170062303543,
0.017785999923944473,
0.017785999923944473,
None,
0.017010999843478203,
0.0650859996676445,
0.1969199925661087,
0.08772700279951096,
0.05595200136303902,
0.17250700294971466,
0.0137339998036623,
0.0137339998036623,
0.06520400196313858,
0.06727799773216248,
0.13235700130462646,
0.12975013256072998,
0.11010699719190598,
0.04597700014710426,
0.049855999648571014,
0.10752200335264206,
0.13239599764347076,
None,
0.139614999294281,
0.042450759559869766,
],
"ra": [
56.0478815,
56.0468989,
56.0478,
56.0478343,
56.0480658,
56.0475873,
56.047908,
56.0480877,
None,
56.0476469,
56.0477499,
56.047177,
56.0469751,
56.0480999,
56.0470656,
56.0477652,
56.0476761,
56.0476218,
56.0469908,
56.0472491,
56.0467978,
56.0472009,
56.0478524,
56.0476997,
56.0471999,
56.0476057,
None,
56.0473734,
56.0477336,
],
"dec": [
71.6368125,
71.6367721,
71.6367167,
71.6367615,
71.6367048,
71.6368681,
71.6368457,
71.6368389,
None,
71.6367596,
71.6365229,
71.6367611,
71.6368439,
71.6367764,
71.6368222,
71.6367943,
71.6368108,
71.6367366,
71.6368412,
71.6367895,
71.6368039,
71.6367984,
71.6367866,
71.6367788,
71.6368348,
71.6367571,
None,
71.6367753,
71.6367119,
],
}
status, data = api(
'PUT',
'photometry',
data=payload,
token=super_admin_token,
)
assert status == 400
assert data['status'] == 'error' | upload_data_token, public_source, ztf_camera, public_group |
auth.guard.ts | import { Injectable } from '@angular/core';
import { CanActivate } from '@angular/router';
import { Router } from '@angular/router'; | constructor(private router: Router) {}
canActivate() {
if (localStorage.getItem('jwt')) {
return true;
}
this.router.navigate(['/login']);
return false;
}
} |
@Injectable()
export class AuthGuard implements CanActivate { |
Util.js | /**
* @param {any} obj
* @returns {any}
*/
export function findPos(obj) {
let curleft = 0;
let curtop = 0;
if (obj.offsetParent) {
do {
curleft += obj.offsetLeft;
curtop += obj.offsetTop;
// eslint-disable-next-line no-cond-assign
} while (obj = obj.offsetParent);
return { left: curleft, top: curtop };
}
return { left: curleft, top: curtop };
}
export function isObject(item) {
return (item && typeof item === 'object' && !Array.isArray(item));
}
export function mergeDeep(target, source) {
const output = Object.assign({}, target);
if (isObject(target) && isObject(source)) {
Object.keys(source).forEach((key) => {
if (isObject(source[key])) {
if (!(key in target))
Object.assign(output, { [key]: source[key] });
else
output[key] = mergeDeep(target[key], source[key]);
} else {
Object.assign(output, { [key]: source[key] });
}
});
}
return output;
}
/**
* Get the closest matching element up the DOM tree.
* @param {Element} elem Starting element
* @param {String} selector Selector to match against (class, ID, data attribute, or tag)
* @return {Boolean|Element} Returns null if not match found
*/
export function getClosest(elem, selector) {
const firstChar = selector.charAt(0);
const supports = 'classList' in document.documentElement;
let attribute; let value;
// If selector is a data attribute, split attribute from value
if (firstChar === '[') {
selector = selector.substr(1, selector.length - 2);
attribute = selector.split('=');
if (attribute.length > 1) {
value = true;
attribute[1] = attribute[1].replace(/"/g, '').replace(/'/g, '');
}
}
// Get closest match
for (; elem && elem !== document && elem.nodeType === 1; elem = elem.parentNode) {
// If selector is a class
if (firstChar === '.') {
if (supports) {
if (elem.classList.contains(selector.substr(1))) {
return elem;
}
} else {
if (new RegExp('(^|\\s)' + selector.substr(1) + '(\\s|$)').test(elem.className)) {
return elem;
}
}
}
// If selector is an ID
if (firstChar === '#') {
if (elem.id === selector.substr(1)) {
return elem;
}
}
// If selector is a data attribute
if (firstChar === '[') {
if (elem.hasAttribute(attribute[0])) {
if (value) {
if (elem.getAttribute(attribute[0]) === attribute[1]) {
return elem;
}
} else {
return elem;
}
}
}
// If selector is a tag
if (elem.tagName.toLowerCase() === selector) {
return elem;
}
}
return null;
}
/**
* Get all DOM element up the tree that contain a class, ID, or data attribute
* @param {Node} elem The base element
* @param {String} selector The class, id, data attribute, or tag to look for
* @return {Array} Null if no match
*/
export function | (elem, selector) {
const parents = [];
const firstChar = selector?.charAt(0);
// Get matches
for ( ; elem && elem !== document; elem = elem.parentNode ) {
if ( selector ) {
// If selector is a class
if ( firstChar === '.' ) {
if ( elem.classList.contains( selector.substr(1) ) ) {
parents.push( elem );
}
}
// If selector is an ID
if ( firstChar === '#' ) {
if ( elem.id === selector.substr(1) ) {
parents.push( elem );
}
}
// If selector is a data attribute
if ( firstChar === '[' ) {
if ( elem.hasAttribute( selector.substr(1, selector.length - 1) ) ) {
parents.push( elem );
}
}
// If selector is a tag
if ( elem.tagName.toLowerCase() === selector ) {
parents.push( elem );
}
} else {
parents.push( elem );
}
}
// Return parents if any exist
return parents.length? parents : null;
}
export function getParentsUntil(elem, parent, selector) {
const parents = [];
const parentType = parent?.charAt(0);
const selectorType = selector?.selector.charAt(0);
// Get matches
for ( ; elem && elem !== document; elem = elem.parentNode ) {
// Check if parent has been reached
if ( parent ) {
// If parent is a class
if ( parentType === '.' ) {
if ( elem.classList.contains( parent.substr(1) ) ) {
break;
}
}
// If parent is an ID
if ( parentType === '#' ) {
if ( elem.id === parent.substr(1) ) {
break;
}
}
// If parent is a data attribute
if ( parentType === '[' ) {
if ( elem.hasAttribute( parent.substr(1, parent.length - 1) ) ) {
break;
}
}
// If parent is a tag
if ( elem.tagName.toLowerCase() === parent ) {
break;
}
}
if ( selector ) {
// If selector is a class
if ( selectorType === '.' ) {
if ( elem.classList.contains( selector.substr(1) ) ) {
parents.push( elem );
}
}
// If selector is an ID
if ( selectorType === '#' ) {
if ( elem.id === selector.substr(1) ) {
parents.push( elem );
}
}
// If selector is a data attribute
if ( selectorType === '[' ) {
if ( elem.hasAttribute( selector.substr(1, selector.length - 1) ) ) {
parents.push( elem );
}
}
// If selector is a tag
if ( elem.tagName.toLowerCase() === selector ) {
parents.push( elem );
}
} else {
parents.push( elem );
}
}
// Return parents if any exist
return parents.length? parents : null;
} | getParents |
sentence_splitting.py | from pathlib import Path
from typing import Optional
from recording_script_generator.app.helper import (
raise_error_if_directory_exists_and_not_overwrite,
raise_error_if_directory_not_exists)
from recording_script_generator.app.io import (load_reading_passages,
load_reading_passages_paths,
load_selection,
save_reading_passages,
save_reading_passages_paths,
save_representations,
save_selection)
from recording_script_generator.core.sentence_splitting import main_inplace
from recording_script_generator.globals import (DEFAULT_CHUNKSIZE_FILES,
DEFAULT_MAXTASKSPERCHILD,
DEFAULT_N_JOBS,
DEFAULT_OVERWRITE) |
def app_split_sentences(working_directory: Path, custom_output_directory: Optional[Path] = None, n_jobs: int = DEFAULT_N_JOBS, maxtasksperchild: Optional[int] = DEFAULT_MAXTASKSPERCHILD, chunksize: Optional[int] = DEFAULT_CHUNKSIZE_FILES, overwrite: bool = DEFAULT_OVERWRITE):
if raise_error_if_directory_not_exists(working_directory):
return
output_directory = working_directory
if custom_output_directory is not None:
if raise_error_if_directory_exists_and_not_overwrite(custom_output_directory, overwrite):
return
output_directory = custom_output_directory
selection = load_selection(working_directory)
reading_passages = load_reading_passages(working_directory)
reading_passages_paths = load_reading_passages_paths(working_directory)
representations = main_inplace(selection, reading_passages, reading_passages_paths,
n_jobs, maxtasksperchild, chunksize)
save_reading_passages(output_directory, reading_passages)
save_selection(output_directory, selection)
save_reading_passages_paths(output_directory, reading_passages_paths)
save_representations(output_directory, representations)
# TODO maybe also remove unused paths from paths | |
MATLAB.js | //Number.prototype.plus; //and complex
//use those so faster
function | (l, r) { return l + r; }
function minus(l, r) { return l - r; }
function times(l, r) { return l * r; }
//function rdivide() { return ; }
//function ldivide() { return ; }
//function power() { return ; }
function maximum() { return max(l, r); } | plus |
append_block_builder.rs | use crate::blob::blob::responses::PutBlockResponse;
use crate::blob::prelude::*;
use azure_core::headers::{add_optional_header, add_optional_header_ref};
use azure_core::prelude::*;
use bytes::Bytes;
#[derive(Debug, Clone)]
pub struct AppendBlockBuilder<'a> {
blob_client: &'a BlobClient,
body: Bytes,
hash: Option<&'a Hash>,
condition_max_size: Option<ConditionMaxSize>,
condition_append_position: Option<ConditionAppendPosition>,
lease_id: Option<&'a LeaseId>,
client_request_id: Option<ClientRequestId<'a>>,
timeout: Option<Timeout>,
}
impl<'a> AppendBlockBuilder<'a> {
pub(crate) fn new(blob_client: &'a BlobClient, body: impl Into<Bytes>) -> Self {
Self {
blob_client,
body: body.into(),
hash: None,
condition_max_size: None,
condition_append_position: None,
lease_id: None,
client_request_id: None,
timeout: None,
}
}
setters! {
hash: &'a Hash => Some(hash),
condition_max_size: ConditionMaxSize => Some(condition_max_size),
condition_append_position: ConditionAppendPosition => Some(condition_append_position),
lease_id: &'a LeaseId => Some(lease_id),
client_request_id: ClientRequestId<'a> => Some(client_request_id),
timeout: Timeout => Some(timeout),
}
pub async fn execute(
&self,
) -> Result<PutBlockResponse, Box<dyn std::error::Error + Send + Sync>> {
let mut url = self.blob_client.url_with_segments(None)?;
self.timeout.append_to_url_query(&mut url);
url.query_pairs_mut().append_pair("comp", "appendblock");
trace!("url == {:?}", url);
| &http::Method::PUT,
&|mut request| {
request = add_optional_header_ref(&self.hash, request);
request = add_optional_header(&self.condition_max_size, request);
request = add_optional_header(&self.condition_append_position, request);
request = add_optional_header_ref(&self.lease_id, request);
request = add_optional_header(&self.client_request_id, request);
request
},
Some(self.body.clone()),
)?;
let response = self
.blob_client
.http_client()
.execute_request_check_status(request, http::StatusCode::CREATED)
.await?;
debug!("response.headers() == {:#?}", response.headers());
Ok(PutBlockResponse::from_headers(response.headers())?)
}
} | let (request, _url) = self.blob_client.prepare_request(
url.as_str(), |
evaluation.py | import os
import argparse
from keras.models import load_model
import numpy as np
from sklearn.metrics import accuracy_score, f1_score
from data_preprocessing import *
parser = argparse.ArgumentParser()
parser.add_argument('-m', '--model_name', default='save/RDNN.h5', type=str)
parser.add_argument('--smooth', type=bool, default=False)
parser.add_argument('--scale', type=bool, default=False)
args = parser.parse_args()
print(args)
x_test = np.load('data/data_test_600.npy')
y_test = np.load('data/label_test_600.npy').reshape(-1, 1)
print('x_test: {}'.format(x_test.shape))
print('y_test: {}'.format(y_test.shape)) |
lie_ratio = np.sum(y_test)/y_test.shape[0]
print('Lie Ratio: {}'.format(lie_ratio))
x_test = TestPreprocess(x_test, args.smooth, args.scale)
print('='*20, 'Model Loading...', '='*20)
model = load_model(args.model_name)
print('='*20, 'Model Loaded', '='*20)
# os.system('clear')
predict = model.predict(x_test)
y_predict = (predict > 0.3).astype(np.int)
lie_ratio = np.sum(y_predict)/y_predict.shape[0]
print('Lie Ratio Predicted: {}'.format(lie_ratio))
score_f1 = f1_score(y_test, y_predict)
score_acc = accuracy_score(y_test, y_predict)
print('f1 score: {}'.format(score_f1))
print('accuracy score: {}'.format(score_acc)) | |
generate_mock_func_call_methods_test.go | package generation
import (
"fmt"
"testing"
"github.com/stretchr/testify/assert"
)
func TestGenerateMockFuncCallArgsMethod(t *testing.T) {
code := generateMockFuncCallArgsMethod(makeMethod(TestMethodDo))
expected := strip(`
// Args returns an interface slice containing the arguments of this
// invocation.
func (c TestClientDoFuncCall) Args() []interface{} {
return []interface{}{c.Arg0}
}
`)
assert.Equal(t, expected, fmt.Sprintf("%#v", code))
}
func TestGenerateMockFuncCallArgsMethodVariadic(t *testing.T) |
func TestGenerateMockFuncCallResultsMethod(t *testing.T) {
code := generateMockFuncCallResultsMethod(makeMethod(TestMethodDo))
expected := strip(`
// Results returns an interface slice containing the results of this
// invocation.
func (c TestClientDoFuncCall) Results() []interface{} {
return []interface{}{c.Result0}
}
`)
assert.Equal(t, expected, fmt.Sprintf("%#v", code))
}
func TestGenerateMockFuncCallResultsMethodMultiple(t *testing.T) {
code := generateMockFuncCallResultsMethod(makeMethod(TestMethodStatus))
expected := strip(`
// Results returns an interface slice containing the results of this
// invocation.
func (c TestClientStatusFuncCall) Results() []interface{} {
return []interface{}{c.Result0, c.Result1}
}
`)
assert.Equal(t, expected, fmt.Sprintf("%#v", code))
}
| {
code := generateMockFuncCallArgsMethod(makeMethod(TestMethodDof))
expected := strip(`
// Args returns an interface slice containing the arguments of this
// invocation. The variadic slice argument is flattened in this array such
// that one positional argument and three variadic arguments would result in
// a slice of four, not two.
func (c TestClientDofFuncCall) Args() []interface{} {
trailing := []interface{}{}
for _, val := range c.Arg1 {
trailing = append(trailing, val)
}
return append([]interface{}{c.Arg0}, trailing...)
}
`)
assert.Equal(t, expected, fmt.Sprintf("%#v", code))
} |
ast.go | package ast
import (
"bytes"
"my-interpreter/token"
"strings"
)
type Node interface {
TokenLiteral() string
String() string
}
type Statement interface {
Node
statementNode()
}
type Expression interface {
Node
expressionNode()
}
type Program struct {
Statements []Statement
}
type Boolean struct {
Token token.Token
Value bool
}
func (p *Program) TokenLiteral() string {
if len(p.Statements) > 0 {
return p.Statements[0].TokenLiteral()
} else {
return ""
}
}
func (p *Program) String() string {
var out bytes.Buffer
for _, s := range p.Statements {
out.WriteString(s.String())
}
return out.String()
}
type LetStatement struct {
Token token.Token // the token.LET token
Name *Identifier
Value Expression
}
func (ls *LetStatement) statementNode() {}
func (ls *LetStatement) TokenLiteral() string { return ls.Token.Literal }
func (ls *LetStatement) String() string {
var out bytes.Buffer
out.WriteString(ls.TokenLiteral() + " ")
out.WriteString(ls.Name.String())
out.WriteString(" = ")
if ls.Value != nil {
out.WriteString(ls.Value.String())
}
out.WriteString(";")
return out.String()
}
type Identifier struct {
Token token.Token // the token.IDENT token
Value string
}
func (i *Identifier) expressionNode() {}
func (i *Identifier) TokenLiteral() string { return i.Token.Literal }
func (i *Identifier) String() string { return i.Value }
type ReturnStatement struct {
Token token.Token // the `return` token
ReturnValue Expression
}
func (rs *ReturnStatement) statementNode() {}
func (rs *ReturnStatement) TokenLiteral() string { return rs.Token.Literal }
func (rs *ReturnStatement) String() string {
var out bytes.Buffer
out.WriteString(rs.TokenLiteral() + " ")
if rs.ReturnValue != nil {
out.WriteString(rs.ReturnValue.String())
}
out.WriteString(";")
return out.String()
}
type ExpressionStatement struct {
Token token.Token // the first token of the expression
Expression Expression
}
func (es *ExpressionStatement) statementNode() {}
func (es *ExpressionStatement) TokenLiteral() string { return es.Token.Literal }
func (es *ExpressionStatement) String() string {
if es.Expression != nil {
return es.Expression.String()
}
return ""
}
type IntegerLiteral struct {
Token token.Token
Value int64
}
func (il *IntegerLiteral) expressionNode() {}
func (il *IntegerLiteral) TokenLiteral() string { return il.Token.Literal }
func (il *IntegerLiteral) String() string { return il.Token.Literal }
type PrefixExpression struct {
Token token.Token // The prefix token, e.g. !
Operator string
Right Expression
}
func (pe *PrefixExpression) expressionNode() {}
func (pe *PrefixExpression) TokenLiteral() string { return pe.Token.Literal }
func (pe *PrefixExpression) String() string {
var out bytes.Buffer
out.WriteString("(")
out.WriteString(pe.Operator)
out.WriteString(pe.Right.String())
out.WriteString(")")
return out.String()
}
type InfixExpression struct {
Token token.Token // The operator token, e.g. +
Left Expression
Operator string
Right Expression
}
func (oe *InfixExpression) expressionNode() {}
func (oe *InfixExpression) TokenLiteral() string { return oe.Token.Literal }
func (oe *InfixExpression) String() string {
var out bytes.Buffer
out.WriteString("(")
out.WriteString(oe.Left.String())
out.WriteString(" " + oe.Operator + " ")
out.WriteString(oe.Right.String())
out.WriteString(")")
return out.String()
}
func (b *Boolean) expressionNode() {}
func (b *Boolean) TokenLiteral() string { return b.Token.Literal }
func (b *Boolean) String() string { return b.Token.Literal }
type IfExpression struct {
Token token.Token // The 'if' token
Condition Expression
Consequence *BlockStatement
Alternative *BlockStatement
}
func (ie *IfExpression) expressionNode() {}
func (ie *IfExpression) TokenLiteral() string { return ie.Token.Literal }
func (ie *IfExpression) String() string {
var out bytes.Buffer
out.WriteString("if")
out.WriteString(ie.Condition.String())
out.WriteString(" ")
out.WriteString(ie.Consequence.String())
if ie.Alternative != nil {
out.WriteString("else ")
out.WriteString(ie.Alternative.String())
}
return out.String()
}
type BlockStatement struct {
Token token.Token // the { token
Statements []Statement
}
func (bs *BlockStatement) statementNode() {}
func (bs *BlockStatement) TokenLiteral() string { return bs.Token.Literal }
func (bs *BlockStatement) String() string {
var out bytes.Buffer
for _, s := range bs.Statements {
out.WriteString(s.String())
}
return out.String()
}
type FunctionLiteral struct {
Token token.Token // The 'fn' token
Parameters []*Identifier
Body *BlockStatement
}
func (fl *FunctionLiteral) expressionNode() {}
func (fl *FunctionLiteral) TokenLiteral() string { return fl.Token.Literal }
func (fl *FunctionLiteral) String() string {
var out bytes.Buffer
params := []string{}
for _, p := range fl.Parameters {
params = append(params, p.String())
}
out.WriteString(fl.TokenLiteral())
out.WriteString("(")
out.WriteString(strings.Join(params, ", "))
out.WriteString(") ")
out.WriteString(fl.Body.String())
return out.String()
}
type CallExpression struct {
Token token.Token // The '(' token
Function Expression // Identifier or FunctionLiteral
Arguments []Expression
}
func (ce *CallExpression) expressionNode() {}
func (ce *CallExpression) TokenLiteral() string { return ce.Token.Literal }
func (ce *CallExpression) String() string {
var out bytes.Buffer
args := []string{}
for _, a := range ce.Arguments {
args = append(args, a.String())
}
out.WriteString(ce.Function.String())
out.WriteString("(")
out.WriteString(strings.Join(args, ", ")) | return out.String()
}
type StringLiteral struct {
Token token.Token
Value string
}
func (sl *StringLiteral) expressionNode() {}
func (sl *StringLiteral) TokenLiteral() string { return sl.Token.Literal }
func (sl *StringLiteral) String() string { return sl.Token.Literal }
type ArrayLiteral struct {
Token token.Token // the '[' token
Elements []Expression
}
func (al *ArrayLiteral) expressionNode() {}
func (al *ArrayLiteral) TokenLiteral() string { return al.Token.Literal }
func (al *ArrayLiteral) String() string {
var out bytes.Buffer
elements := []string{}
for _, el := range al.Elements {
elements = append(elements, el.String())
}
out.WriteString("[")
out.WriteString(strings.Join(elements, ", "))
out.WriteString("]")
return out.String()
}
type IndexExpression struct {
Token token.Token // The [ token
Left Expression
Index Expression
}
func (ie *IndexExpression) expressionNode() {}
func (ie *IndexExpression) TokenLiteral() string { return ie.Token.Literal }
func (ie *IndexExpression) String() string {
var out bytes.Buffer
out.WriteString("(")
out.WriteString(ie.Left.String())
out.WriteString("[")
out.WriteString(ie.Index.String())
out.WriteString("])")
return out.String()
}
type HashLiteral struct {
Token token.Token // the `{` token
Pairs map[Expression]Expression
}
func (hl *HashLiteral) expressionNode() {}
func (hl *HashLiteral) TokenLiteral() string { return hl.Token.Literal }
func (hl *HashLiteral) String() string {
var out bytes.Buffer
pairs := []string{}
for key, value := range hl.Pairs {
pairs = append(pairs, key.String()+":"+value.String())
}
out.WriteString("{")
out.WriteString(strings.Join(pairs, ", "))
out.WriteString("}")
return out.String()
} | out.WriteString(")")
|
buffer.rs | //! Buffer definition of generic netlink packet
use crate::{constants::GENL_HDRLEN, header::GenlHeader, message::GenlMessage};
use netlink_packet_core::DecodeError;
use netlink_packet_utils::{Parseable, ParseableParametrized};
use std::fmt::Debug;
buffer!(GenlBuffer(GENL_HDRLEN) {
cmd: (u8, 0),
version: (u8, 1),
payload: (slice, GENL_HDRLEN..),
});
impl<F> ParseableParametrized<[u8], u16> for GenlMessage<F>
where
F: ParseableParametrized<[u8], GenlHeader> + Debug,
{
fn parse_with_param(buf: &[u8], message_type: u16) -> Result<Self, DecodeError> |
}
impl<'a, F, T> ParseableParametrized<GenlBuffer<&'a T>, u16> for GenlMessage<F>
where
F: ParseableParametrized<[u8], GenlHeader> + Debug,
T: AsRef<[u8]> + ?Sized,
{
fn parse_with_param(buf: &GenlBuffer<&'a T>, message_type: u16) -> Result<Self, DecodeError> {
let header = GenlHeader::parse(buf)?;
let payload_buf = buf.payload();
Ok(GenlMessage::new(
header,
F::parse_with_param(payload_buf, header)?,
message_type,
))
}
}
| {
let buf = GenlBuffer::new_checked(buf)?;
Self::parse_with_param(&buf, message_type)
} |
index.ts | /*
* @license
* Copyright Hôpitaux Universitaires de Genève. All Rights Reserved.
*
* Use of this source code is governed by an Apache-2.0 license that can be
* found in the LICENSE file at https://github.com/DSI-HUG/dejajs-components/blob/master/LICENSE
*/
import { CommonModule } from '@angular/common';
import { NgModule } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { DejaEditableDirective } from './content-editable.directive';
@NgModule({
declarations: [DejaEditableDirective],
exports: [DejaEditableDirective],
imports: [
CommonModule,
FormsModule,
],
})
export class De | }
export * from './content-editable.directive';
| jaEditableModule { |
hgwi.py | import torch
import torch.nn as nn
class RadarStackedHourglass(nn.Module):
|
class InceptionLayerConcat(nn.Module):
"""
Kernal size: for 2d kernal size, since the kernal size in temporal domain will be fixed
"""
def __init__(self, kernal_size, in_channel, stride):
super(InceptionLayerConcat, self).__init__()
paddingX = kernal_size[0] // 2
paddingY = kernal_size[1] // 2
self.branch1 = nn.Conv3d(
in_channels=in_channel,
out_channels=32,
kernel_size=(5, kernal_size[0], kernal_size[1]),
stride=stride,
padding=(2, paddingX, paddingY),
)
self.branch2a = nn.Conv3d(
in_channels=in_channel,
out_channels=64,
kernel_size=(5, kernal_size[0], kernal_size[1]),
stride=(1, 1, 1),
padding=(2, paddingX, paddingY),
)
self.branch2b = nn.Conv3d(
in_channels=64,
out_channels=64,
kernel_size=(9, kernal_size[0], kernal_size[1]),
stride=stride,
padding=(4, paddingX, paddingY),
)
self.branch3a = nn.Conv3d(
in_channels=in_channel,
out_channels=64,
kernel_size=(5, kernal_size[0], kernal_size[1]),
stride=(1, 1, 1),
padding=(2, paddingX, paddingY),
)
self.branch3b = nn.Conv3d(
in_channels=64,
out_channels=64,
kernel_size=(13, kernal_size[0], kernal_size[1]),
stride=stride,
padding=(6, paddingX, paddingY),
)
def forward(self, x):
branch1 = self.branch1(x)
branch2 = self.branch2a(x)
branch2 = self.branch2b(branch2)
branch3 = self.branch3a(x)
branch3 = self.branch3b(branch3)
return torch.cat((branch1, branch2, branch3), 1)
class RODEncode(nn.Module):
def __init__(self):
super(RODEncode, self).__init__()
self.inception1 = InceptionLayerConcat(
kernal_size=(5, 5), in_channel=160, stride=(1, 2, 2)
)
self.inception2 = InceptionLayerConcat(
kernal_size=(5, 5), in_channel=160, stride=(1, 2, 2)
)
self.inception3 = InceptionLayerConcat(
kernal_size=(5, 5), in_channel=160, stride=(1, 2, 2)
)
self.skip_inception1 = InceptionLayerConcat(
kernal_size=(5, 5), in_channel=160, stride=(1, 2, 2)
)
self.skip_inception2 = InceptionLayerConcat(
kernal_size=(5, 5), in_channel=160, stride=(1, 2, 2)
)
self.skip_inception3 = InceptionLayerConcat(
kernal_size=(5, 5), in_channel=160, stride=(1, 2, 2)
)
# self.conv4a = nn.Conv3d(in_channels=64, out_channels=64,
# kernel_size=(9, 5, 5), stride=(1, 1, 1), padding=(4, 2, 2))
# self.conv4b = nn.Conv3d(in_channels=64, out_channels=64,
# kernel_size=(9, 5, 5), stride=(1, 2, 2), padding=(4, 2, 2))
# self.conv5a = nn.Conv3d(in_channels=64, out_channels=64,
# kernel_size=(9, 5, 5), stride=(1, 1, 1), padding=(4, 2, 2))
# self.conv5b = nn.Conv3d(in_channels=64, out_channels=64,
# kernel_size=(9, 5, 5), stride=(1, 2, 2), padding=(4, 2, 2))
self.bn1 = nn.BatchNorm3d(num_features=160)
self.bn2 = nn.BatchNorm3d(num_features=160)
self.bn3 = nn.BatchNorm3d(num_features=160)
self.skip_bn1 = nn.BatchNorm3d(num_features=160)
self.skip_bn2 = nn.BatchNorm3d(num_features=160)
self.skip_bn3 = nn.BatchNorm3d(num_features=160)
# self.bn4a = nn.BatchNorm3d(num_features=64)
# self.bn4b = nn.BatchNorm3d(num_features=64)
# self.bn5a = nn.BatchNorm3d(num_features=64)
# self.bn5b = nn.BatchNorm3d(num_features=64)
self.relu = nn.ReLU()
def forward(self, x):
x1 = self.relu(self.skip_bn1(self.skip_inception1(x)))
x = self.relu(
self.bn1(self.inception1(x))
) # (B, 2, W, 128, 128) -> (B, 64, W, 128, 128)
x2 = self.relu(self.skip_bn2(self.skip_inception2(x)))
x = self.relu(
self.bn2(self.inception2(x))
) # (B, 2, W, 128, 128) -> (B, 64, W, 128, 128)
x3 = self.relu(self.skip_bn3(self.skip_inception3(x)))
x = self.relu(
self.bn3(self.inception3(x))
) # (B, 2, W, 128, 128) -> (B, 64, W, 128, 128)
return x, x1, x2, x3
class RODDecode(nn.Module):
def __init__(self):
super(RODDecode, self).__init__()
self.convt1 = nn.ConvTranspose3d(
in_channels=160,
out_channels=160,
kernel_size=(3, 6, 6),
stride=(1, 2, 2),
padding=(1, 2, 2),
)
self.convt2 = nn.ConvTranspose3d(
in_channels=160,
out_channels=160,
kernel_size=(3, 6, 6),
stride=(1, 2, 2),
padding=(1, 2, 2),
)
self.convt3 = nn.ConvTranspose3d(
in_channels=160,
out_channels=160,
kernel_size=(3, 6, 6),
stride=(1, 2, 2),
padding=(1, 2, 2),
)
self.conv1 = nn.Conv3d(
in_channels=160,
out_channels=160,
kernel_size=(9, 5, 5),
stride=(1, 1, 1),
padding=(4, 2, 2),
)
self.conv2 = nn.Conv3d(
in_channels=160,
out_channels=160,
kernel_size=(9, 5, 5),
stride=(1, 1, 1),
padding=(4, 2, 2),
)
self.conv3 = nn.Conv3d(
in_channels=160,
out_channels=160,
kernel_size=(9, 5, 5),
stride=(1, 1, 1),
padding=(4, 2, 2),
)
self.prelu = nn.PReLU()
self.sigmoid = nn.Sigmoid()
# self.upsample = nn.Upsample(size=(rodnet_configs['win_size'], radar_configs['ramap_rsize'],
# radar_configs['ramap_asize']), mode='nearest')
def forward(self, x, x1, x2, x3):
x = self.prelu(
self.convt1(x + x3)
) # (B, 256, W/4, 16, 16) -> (B, 128, W/2, 32, 32)
x = self.prelu(self.conv1(x))
x = self.prelu(
self.convt2(x + x2)
) # (B, 128, W/2, 32, 32) -> (B, 64, W, 64, 64)
x = self.prelu(self.conv2(x))
x = self.prelu(self.convt3(x + x1)) # (B, 64, W, 64, 64) -> (B, 3, W, 128, 128)
x = self.prelu(self.conv3(x))
return x
| def __init__(self, n_class, stacked_num=1):
super(RadarStackedHourglass, self).__init__()
self.stacked_num = stacked_num
self.conv1a = nn.Conv3d(
in_channels=2,
out_channels=32,
kernel_size=(9, 5, 5),
stride=(1, 1, 1),
padding=(4, 2, 2),
)
self.conv1b = nn.Conv3d(
in_channels=32,
out_channels=64,
kernel_size=(9, 5, 5),
stride=(1, 1, 1),
padding=(4, 2, 2),
)
self.conv1c = nn.Conv3d(
in_channels=64,
out_channels=160,
kernel_size=(9, 5, 5),
stride=(1, 1, 1),
padding=(4, 2, 2),
)
self.hourglass = []
for i in range(stacked_num):
self.hourglass.append(
nn.ModuleList(
[
RODEncode(),
RODDecode(),
nn.Conv3d(
in_channels=160,
out_channels=n_class,
kernel_size=(9, 5, 5),
stride=(1, 1, 1),
padding=(4, 2, 2),
),
nn.Conv3d(
in_channels=n_class,
out_channels=160,
kernel_size=(9, 5, 5),
stride=(1, 1, 1),
padding=(4, 2, 2),
),
]
)
)
self.hourglass = nn.ModuleList(self.hourglass)
self.relu = nn.ReLU()
self.bn1a = nn.BatchNorm3d(num_features=32)
self.bn1b = nn.BatchNorm3d(num_features=64)
self.bn1c = nn.BatchNorm3d(num_features=160)
self.sigmoid = nn.Sigmoid()
def forward(self, x):
x = self.relu(self.bn1a(self.conv1a(x)))
x = self.relu(self.bn1b(self.conv1b(x)))
x = self.relu(self.bn1c(self.conv1c(x)))
out = []
for i in range(self.stacked_num):
x, x1, x2, x3 = self.hourglass[i][0](x)
x = self.hourglass[i][1](x, x1, x2, x3)
confmap = self.hourglass[i][2](x)
out.append(self.sigmoid(confmap))
if i < self.stacked_num - 1:
confmap_ = self.hourglass[i][3](confmap)
x = x + confmap_
return out |
notification-http.service.ts | import { HttpClient, HttpParams } from '@angular/common/http';
import { Injectable } from '@angular/core';
import { Observable } from 'rxjs';
import { AuthService } from 'src/app/auth/services/auth.service';
import { INotification } from '../types/notification';
import { INotificationConfirm } from '../types/notification-confirm';
@Injectable({
providedIn: 'root'
})
export class NotificationHttpService {
private readonly notificationUrl = '/api/notifications';
constructor(
private http: HttpClient,
private readonly authService: AuthService
) { }
public getNotificationList(): Observable<INotification[]> {
const params = new HttpParams().append('userId', this.authService.userId);
return this.http.get<INotification[]>(this.notificationUrl, { params });
}
public readNotification(notification: INotification): Observable<void> {
return this.http.put<void>(`${this.notificationUrl}/${notification.id}/read`, notification);
}
public changeNotificationConfirmation(notification: INotification, confirm: number): Observable<INotification> {
const params = new HttpParams().append('confirm', `${confirm}`);
return this.http.put<INotification>(`${this.notificationUrl}/${notification.id}`, notification, { params });
}
} | ||
kem.rs | //! KEM API
//!
//! See [`Kem`] for the main functionality.
//! [`Algorithm`] lists the available algorithms.
use alloc::borrow;
use alloc::vec::Vec;
use core::ptr::NonNull;
#[cfg(feature = "no_std")]
use cstr_core::CStr;
#[cfg(not(feature = "no_std"))]
use std::ffi::CStr;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use crate::ffi::kem as ffi;
use crate::newtype_buffer;
use crate::*;
// Owned/borrowed byte-buffer newtype pairs for each KEM artefact.
newtype_buffer!(PublicKey, PublicKeyRef);
newtype_buffer!(SecretKey, SecretKeyRef);
newtype_buffer!(Ciphertext, CiphertextRef);
newtype_buffer!(SharedSecret, SharedSecretRef);
/// Generates the `Algorithm` enum, the variant-to-liboqs-id mapping, and a
/// per-algorithm test module from a list of
/// `("feature") Variant: OQS_ID` entries.
macro_rules! implement_kems {
    { $(($feat: literal) $kem: ident: $oqs_id: ident),* $(,)? } => (
        /// Supported algorithms by OQS
        ///
        /// Note that this doesn't mean that they'll be available.
        ///
        /// Optional support for `serde` if that feature is enabled.
        #[derive(Clone, Copy, Debug)]
        #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
        #[allow(missing_docs)]
        pub enum Algorithm {
            $(
                $kem,
            )*
        }

        fn algorithm_to_id(algorithm: Algorithm) -> *const libc::c_char {
            let id: &[u8] = match algorithm {
                $(
                    Algorithm::$kem => &ffi::$oqs_id[..],
                )*
            };
            // Cast via `libc::c_char`, not `i8`: `c_char` is `u8` on some
            // targets (e.g. aarch64 Linux), where a hard `*const i8` cast
            // would not match the declared return type.
            id.as_ptr() as *const libc::c_char
        }

        $(
            #[cfg(test)]
            #[allow(non_snake_case)]
            mod $kem {
                use super::*;

                // Round-trip: encapsulating to a fresh public key and
                // decapsulating with the matching secret key must agree.
                #[test]
                #[cfg(feature = $feat)]
                fn test_encaps_decaps() -> Result<()> {
                    crate::init();

                    let alg = Algorithm::$kem;
                    let kem = Kem::new(alg)?;
                    let (pk, sk) = kem.keypair()?;
                    let (ct, ss1) = kem.encapsulate(&pk)?;
                    let ss2 = kem.decapsulate(&sk, &ct)?;
                    assert_eq!(ss1, ss2, "shared secret not equal!");
                    Ok(())
                }

                // `is_enabled` must agree with the cargo feature gate.
                #[test]
                fn test_enabled() {
                    crate::init();
                    if cfg!(feature = $feat) {
                        assert!(Algorithm::$kem.is_enabled());
                    } else {
                        assert!(!Algorithm::$kem.is_enabled())
                    }
                }
            }
        )*
    )
}
// One entry per scheme: (cargo feature gate) EnumVariant: liboqs identifier
// constant. Each entry also generates that variant's test module.
implement_kems! {
    ("bike") BikeL1: OQS_KEM_alg_bike_l1,
    ("bike") BikeL3: OQS_KEM_alg_bike_l3,
    ("classic_mceliece") ClassicMcEliece348864: OQS_KEM_alg_classic_mceliece_348864,
    ("classic_mceliece") ClassicMcEliece348864f: OQS_KEM_alg_classic_mceliece_348864f,
    ("classic_mceliece") ClassicMcEliece460896: OQS_KEM_alg_classic_mceliece_460896,
    ("classic_mceliece") ClassicMcEliece460896f: OQS_KEM_alg_classic_mceliece_460896f,
    ("classic_mceliece") ClassicMcEliece6688128: OQS_KEM_alg_classic_mceliece_6688128,
    ("classic_mceliece") ClassicMcEliece6688128f: OQS_KEM_alg_classic_mceliece_6688128f,
    ("classic_mceliece") ClassicMcEliece6960119: OQS_KEM_alg_classic_mceliece_6960119,
    ("classic_mceliece") ClassicMcEliece6960119f: OQS_KEM_alg_classic_mceliece_6960119f,
    ("classic_mceliece") ClassicMcEliece8192128: OQS_KEM_alg_classic_mceliece_8192128,
    ("classic_mceliece") ClassicMcEliece8192128f: OQS_KEM_alg_classic_mceliece_8192128f,
    ("hqc") Hqc128: OQS_KEM_alg_hqc_128,
    ("hqc") Hqc192: OQS_KEM_alg_hqc_192,
    ("hqc") Hqc256: OQS_KEM_alg_hqc_256,
    ("kyber") Kyber512: OQS_KEM_alg_kyber_512,
    ("kyber") Kyber768: OQS_KEM_alg_kyber_768,
    ("kyber") Kyber1024: OQS_KEM_alg_kyber_1024,
    ("kyber") Kyber512_90s: OQS_KEM_alg_kyber_512_90s,
    ("kyber") Kyber768_90s: OQS_KEM_alg_kyber_768_90s,
    ("kyber") Kyber1024_90s: OQS_KEM_alg_kyber_1024_90s,
    ("ntru") NtruHps2048509: OQS_KEM_alg_ntru_hps2048509,
    ("ntru") NtruHps2048677: OQS_KEM_alg_ntru_hps2048677,
    ("ntru") NtruHps4096821: OQS_KEM_alg_ntru_hps4096821,
    ("ntru") NtruHrss701: OQS_KEM_alg_ntru_hrss701,
    ("ntruprime") NtruPrimeNtrulpr653: OQS_KEM_alg_ntruprime_ntrulpr653,
    ("ntruprime") NtruPrimeNtrulpr761: OQS_KEM_alg_ntruprime_ntrulpr761,
    ("ntruprime") NtruPrimeNtrulpr857: OQS_KEM_alg_ntruprime_ntrulpr857,
    ("ntruprime") NtruPrimeSntrup653: OQS_KEM_alg_ntruprime_sntrup653,
    ("ntruprime") NtruPrimeSntrup761: OQS_KEM_alg_ntruprime_sntrup761,
    ("ntruprime") NtruPrimeSntrup857: OQS_KEM_alg_ntruprime_sntrup857,
    ("saber") Lightsaber: OQS_KEM_alg_saber_lightsaber,
    ("saber") Saber: OQS_KEM_alg_saber_saber,
    ("saber") Firesaber: OQS_KEM_alg_saber_firesaber,
    ("frodokem") FrodoKem640Aes: OQS_KEM_alg_frodokem_640_aes,
    ("frodokem") FrodoKem640Shake: OQS_KEM_alg_frodokem_640_shake,
    ("frodokem") FrodoKem976Aes: OQS_KEM_alg_frodokem_976_aes,
    ("frodokem") FrodoKem976Shake: OQS_KEM_alg_frodokem_976_shake,
    ("frodokem") FrodoKem1344Aes: OQS_KEM_alg_frodokem_1344_aes,
    ("frodokem") FrodoKem1344Shake: OQS_KEM_alg_frodokem_1344_shake,
    ("sidh") SidhP434: OQS_KEM_alg_sidh_p434,
    ("sidh") SidhP503: OQS_KEM_alg_sidh_p503,
    ("sidh") SidhP610: OQS_KEM_alg_sidh_p610,
    ("sidh") SidhP751: OQS_KEM_alg_sidh_p751,
    ("sidh") SidhP434Compressed: OQS_KEM_alg_sidh_p434_compressed,
    ("sidh") SidhP503Compressed: OQS_KEM_alg_sidh_p503_compressed,
    ("sidh") SidhP610Compressed: OQS_KEM_alg_sidh_p610_compressed,
    ("sidh") SidhP751Compressed: OQS_KEM_alg_sidh_p751_compressed,
    ("sike") SikeP434: OQS_KEM_alg_sike_p434,
    ("sike") SikeP503: OQS_KEM_alg_sike_p503,
    ("sike") SikeP610: OQS_KEM_alg_sike_p610,
    ("sike") SikeP751: OQS_KEM_alg_sike_p751,
    ("sike") SikeP434Compressed: OQS_KEM_alg_sike_p434_compressed,
    ("sike") SikeP503Compressed: OQS_KEM_alg_sike_p503_compressed,
    ("sike") SikeP610Compressed: OQS_KEM_alg_sike_p610_compressed,
    ("sike") SikeP751Compressed: OQS_KEM_alg_sike_p751_compressed,
}
impl Algorithm {
    /// Whether the linked liboqs build has this algorithm compiled in.
    pub fn is_enabled(self) -> bool {
        let id = algorithm_to_id(self);
        // liboqs reports 1 for enabled, 0 otherwise.
        unsafe { ffi::OQS_KEM_alg_is_enabled(id) == 1 }
    }

    /// Raw NUL-terminated algorithm identifier for use with the FFI API.
    pub fn to_id(self) -> *const libc::c_char {
        algorithm_to_id(self)
    }
}
/// KEM algorithm
///
/// # Example
/// ```rust
/// # if !cfg!(feature = "kyber") { return; }
/// use oqs;
/// oqs::init();
/// let kem = oqs::kem::Kem::new(oqs::kem::Algorithm::Kyber512).unwrap();
/// let (pk, sk) = kem.keypair().unwrap();
/// let (ct, ss) = kem.encapsulate(&pk).unwrap();
/// let ss2 = kem.decapsulate(&sk, &ct).unwrap();
/// assert_eq!(ss, ss2);
/// ```
pub struct Kem {
    // Owned pointer to the liboqs KEM object; released in `Drop`.
    kem: NonNull<ffi::OQS_KEM>,
}
// SAFETY: the underlying OQS_KEM object is only read after construction and
// the liboqs KEM entry points keep no shared mutable state, so moving and
// sharing a `Kem` across threads is sound. -- NOTE(review): soundness rests
// on liboqs's thread-safety guarantees; confirm against the liboqs docs.
unsafe impl Sync for Kem {}
unsafe impl Send for Kem {}
impl Drop for Kem {
    /// Releases the underlying liboqs KEM object.
    fn drop(&mut self) {
        // SAFETY: `self.kem` was allocated by `OQS_KEM_new` and is freed
        // exactly once, here.
        unsafe { ffi::OQS_KEM_free(self.kem.as_ptr()) };
    }
}
impl core::convert::TryFrom<Algorithm> for Kem {
    type Error = crate::Error;

    /// Fallible conversion; fails when the algorithm is disabled in liboqs.
    fn try_from(alg: Algorithm) -> Result<Kem> {
        Self::new(alg)
    }
}
impl Kem {
/// Construct a new algorithm
pub fn new(algorithm: Algorithm) -> Result<Self> {
let kem = unsafe { ffi::OQS_KEM_new(algorithm_to_id(algorithm)) };
NonNull::new(kem).map_or_else(|| Err(Error::AlgorithmDisabled), |kem| Ok(Self { kem }))
}
/// Get the name of the algorithm
pub fn name(&self) -> borrow::Cow<str> {
    let name_ptr = unsafe { self.kem.as_ref() }.method_name;
    // SAFETY: liboqs guarantees `method_name` points at a valid
    // NUL-terminated string that lives as long as the KEM object.
    unsafe { CStr::from_ptr(name_ptr) }.to_string_lossy()
}
/// Get the version of the implementation
pub fn version(&self) -> borrow::Cow<str> {
    let kem = unsafe { self.kem.as_ref() };
    // Read `alg_version`, not `method_name`: the previous code copy-pasted
    // the name accessor and returned the algorithm name as the version.
    let cstr = unsafe { CStr::from_ptr(kem.alg_version) };
    cstr.to_string_lossy()
}
/// Claimed NIST security level of this parameter set.
pub fn claimed_nist_level(&self) -> u8 {
    unsafe { self.kem.as_ref() }.claimed_nist_level
}

/// Whether the scheme claims IND-CCA security.
pub fn is_ind_cca(&self) -> bool {
    unsafe { self.kem.as_ref() }.ind_cca
}

/// Size in bytes of public keys for this algorithm.
pub fn length_public_key(&self) -> usize {
    unsafe { self.kem.as_ref() }.length_public_key
}
/// Get the length of the secret key
pub fn length_secret_key(&self) -> usize |
/// Size in bytes of ciphertexts for this algorithm.
pub fn length_ciphertext(&self) -> usize {
    unsafe { self.kem.as_ref() }.length_ciphertext
}

/// Size in bytes of the derived shared secret.
pub fn length_shared_secret(&self) -> usize {
    unsafe { self.kem.as_ref() }.length_shared_secret
}
/// Wrap `buf` as a secret-key reference.
///
/// Returns `None` unless `buf` has exactly the secret-key length.
pub fn secret_key_from_bytes<'a>(&self, buf: &'a [u8]) -> Option<SecretKeyRef<'a>> {
    if buf.len() == self.length_secret_key() {
        Some(SecretKeyRef::new(buf))
    } else {
        None
    }
}

/// Wrap `buf` as a public-key reference.
///
/// Returns `None` unless `buf` has exactly the public-key length.
pub fn public_key_from_bytes<'a>(&self, buf: &'a [u8]) -> Option<PublicKeyRef<'a>> {
    if buf.len() == self.length_public_key() {
        Some(PublicKeyRef::new(buf))
    } else {
        None
    }
}

/// Wrap `buf` as a ciphertext reference.
///
/// Returns `None` unless `buf` has exactly the ciphertext length.
pub fn ciphertext_from_bytes<'a>(&self, buf: &'a [u8]) -> Option<CiphertextRef<'a>> {
    if buf.len() == self.length_ciphertext() {
        Some(CiphertextRef::new(buf))
    } else {
        None
    }
}

/// Wrap `buf` as a shared-secret reference.
///
/// Returns `None` unless `buf` has exactly the shared-secret length.
pub fn shared_secret_from_bytes<'a>(&self, buf: &'a [u8]) -> Option<SharedSecretRef<'a>> {
    if buf.len() == self.length_shared_secret() {
        Some(SharedSecretRef::new(buf))
    } else {
        None
    }
}
/// Generate a new keypair
///
/// # Errors
/// Propagates any non-success status returned by liboqs.
pub fn keypair(&self) -> Result<(PublicKey, SecretKey)> {
    let kem = unsafe { self.kem.as_ref() };
    let func = kem.keypair.unwrap();
    // Allocate output buffers with exactly the capacity liboqs will fill.
    let mut pk = PublicKey {
        bytes: Vec::with_capacity(kem.length_public_key),
    };
    let mut sk = SecretKey {
        bytes: Vec::with_capacity(kem.length_secret_key),
    };
    // SAFETY: both pointers refer to allocations of at least the lengths
    // the liboqs keypair routine writes.
    let status = unsafe { func(pk.bytes.as_mut_ptr(), sk.bytes.as_mut_ptr()) };
    status_to_result(status)?;
    // update the lengths of the vecs
    // this is safe to do, as we have initialised them now.
    unsafe {
        pk.bytes.set_len(kem.length_public_key);
        sk.bytes.set_len(kem.length_secret_key);
    }
    Ok((pk, sk))
}
/// Encapsulate to the provided public key
///
/// Returns the ciphertext to transmit plus the locally derived shared
/// secret.
///
/// # Errors
/// `Error::InvalidLength` when `pk` has the wrong length; otherwise any
/// liboqs failure status.
pub fn encapsulate<'a, P: Into<PublicKeyRef<'a>>>(
    &self,
    pk: P,
) -> Result<(Ciphertext, SharedSecret)> {
    let pk = pk.into();
    if pk.bytes.len() != self.length_public_key() {
        return Err(Error::InvalidLength);
    }
    let kem = unsafe { self.kem.as_ref() };
    let func = kem.encaps.unwrap();
    // Output buffers sized exactly as liboqs expects.
    let mut ct = Ciphertext {
        bytes: Vec::with_capacity(kem.length_ciphertext),
    };
    let mut ss = SharedSecret {
        bytes: Vec::with_capacity(kem.length_shared_secret),
    };
    // call encapsulate
    // SAFETY: output capacities match the algorithm's lengths and `pk`
    // was length-checked above.
    let status = unsafe {
        func(
            ct.bytes.as_mut_ptr(),
            ss.bytes.as_mut_ptr(),
            pk.bytes.as_ptr(),
        )
    };
    status_to_result(status)?;
    // update the lengths of the vecs
    // this is safe to do, as we have initialised them now.
    unsafe {
        ct.bytes.set_len(kem.length_ciphertext);
        ss.bytes.set_len(kem.length_shared_secret);
    }
    Ok((ct, ss))
}
/// Decapsulate the provided ciphertext
///
/// # Errors
/// `Error::InvalidLength` when `sk` or `ct` has the wrong length;
/// otherwise any liboqs failure status.
pub fn decapsulate<'a, 'b, S: Into<SecretKeyRef<'a>>, C: Into<CiphertextRef<'b>>>(
    &self,
    sk: S,
    ct: C,
) -> Result<SharedSecret> {
    let kem = unsafe { self.kem.as_ref() };
    let sk = sk.into();
    let ct = ct.into();
    if sk.bytes.len() != self.length_secret_key() || ct.bytes.len() != self.length_ciphertext()
    {
        return Err(Error::InvalidLength);
    }
    // Output buffer sized exactly as liboqs expects.
    let mut ss = SharedSecret {
        bytes: Vec::with_capacity(kem.length_shared_secret),
    };
    let func = kem.decaps.unwrap();
    // Call decapsulate
    // SAFETY: `ss` has the required capacity and both inputs were
    // length-checked above.
    let status = unsafe { func(ss.bytes.as_mut_ptr(), ct.bytes.as_ptr(), sk.bytes.as_ptr()) };
    status_to_result(status)?;
    // update the lengths of the vecs
    // this is safe to do, as we have initialised them now.
    unsafe { ss.bytes.set_len(kem.length_shared_secret) };
    Ok(ss)
}
}
| {
let kem = unsafe { self.kem.as_ref() };
kem.length_secret_key
} |
bidi.py | # Copyright 2017, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bi-directional streaming RPC helpers."""
import collections
import datetime
import logging
import threading
import time
from six.moves import queue
from google.api_core import exceptions
_LOGGER = logging.getLogger(__name__)
_BIDIRECTIONAL_CONSUMER_NAME = "Thread-ConsumeBidirectionalStream"
class _RequestQueueGenerator(object):
"""A helper for sending requests to a gRPC stream from a Queue.
This generator takes requests off a given queue and yields them to gRPC.
This helper is useful when you have an indeterminate, indefinite, or
otherwise open-ended set of requests to send through a request-streaming
(or bidirectional) RPC.
The reason this is necessary is because gRPC takes an iterator as the
request for request-streaming RPCs. gRPC consumes this iterator in another
thread to allow it to block while generating requests for the stream.
However, if the generator blocks indefinitely gRPC will not be able to
clean up the thread as it'll be blocked on `next(iterator)` and not be able
to check the channel status to stop iterating. This helper mitigates that
by waiting on the queue with a timeout and checking the RPC state before
yielding.
Finally, it allows for retrying without swapping queues because if it does
pull an item off the queue when the RPC is inactive, it'll immediately put
it back and then exit. This is necessary because yielding the item in this
case will cause gRPC to discard it. In practice, this means that the order
of messages is not guaranteed. If such a thing is necessary it would be
easy to use a priority queue.
Example::
requests = request_queue_generator(q)
call = stub.StreamingRequest(iter(requests))
requests.call = call
for response in call:
print(response)
q.put(...)
Note that it is possible to accomplish this behavior without "spinning"
(using a queue timeout). One possible way would be to use more threads to
multiplex the grpc end event with the queue, another possible way is to
use selectors and a custom event/queue object. Both of these approaches
are significant from an engineering perspective for small benefit - the
CPU consumed by spinning is pretty minuscule.
Args:
queue (queue.Queue): The request queue.
period (float): The number of seconds to wait for items from the queue
before checking if the RPC is cancelled. In practice, this
determines the maximum amount of time the request consumption
thread will live after the RPC is cancelled.
initial_request (Union[protobuf.Message,
Callable[None, protobuf.Message]]): The initial request to
yield. This is done independently of the request queue to allow for
easily restarting streams that require some initial configuration
request.
"""
def __init__(self, queue, period=1, initial_request=None):
self._queue = queue
self._period = period
self._initial_request = initial_request
self.call = None
def _is_active(self):
# Note: there is a possibility that this starts *before* the call
# property is set. So we have to check if self.call is set before
# seeing if it's active.
if self.call is not None and not self.call.is_active():
return False
else:
return True
def __iter__(self):
    """Yield requests from the queue until the RPC ends or ``None`` is queued."""
    # Any configured initial request is sent first, ahead of queued items.
    if self._initial_request is not None:
        if callable(self._initial_request):
            yield self._initial_request()
        else:
            yield self._initial_request

    while True:
        try:
            # Bounded wait so we periodically re-check whether the RPC
            # is still alive instead of blocking forever.
            item = self._queue.get(timeout=self._period)
        except queue.Empty:
            if not self._is_active():
                _LOGGER.debug(
                    "Empty queue and inactive call, exiting request " "generator."
                )
                return
            else:
                # call is still active, keep waiting for queue items.
                continue

        # The consumer explicitly sent "None", indicating that the request
        # should end.
        if item is None:
            _LOGGER.debug("Cleanly exiting request generator.")
            return

        if not self._is_active():
            # We have an item, but the call is closed. We should put the
            # item back on the queue so that the next call can consume it.
            self._queue.put(item)
            _LOGGER.debug(
                "Inactive call, replacing item on queue and exiting "
                "request generator."
            )
            return

        yield item
class _Throttle(object):
"""A context manager limiting the total entries in a sliding time window.
If more than ``access_limit`` attempts are made to enter the context manager
instance in the last ``time window`` interval, the exceeding requests block
until enough time elapses.
The context manager instances are thread-safe and can be shared between
multiple threads. If multiple requests are blocked and waiting to enter,
the exact order in which they are allowed to proceed is not determined.
Example::
max_three_per_second = _Throttle(
access_limit=3, time_window=datetime.timedelta(seconds=1)
)
for i in range(5):
with max_three_per_second as time_waited:
print("{}: Waited {} seconds to enter".format(i, time_waited))
Args:
access_limit (int): the maximum number of entries allowed in the time window
time_window (datetime.timedelta): the width of the sliding time window
"""
def __init__(self, access_limit, time_window):
if access_limit < 1:
raise ValueError("access_limit argument must be positive")
if time_window <= datetime.timedelta(0):
raise ValueError("time_window argument must be a positive timedelta")
self._time_window = time_window
self._access_limit = access_limit
self._past_entries = collections.deque(maxlen=access_limit) # least recent first
self._entry_lock = threading.Lock()
def __enter__(self):
with self._entry_lock:
cutoff_time = datetime.datetime.now() - self._time_window
# drop the entries that are too old, as they are no longer relevant
while self._past_entries and self._past_entries[0] < cutoff_time:
self._past_entries.popleft()
if len(self._past_entries) < self._access_limit:
self._past_entries.append(datetime.datetime.now())
return 0.0 # no waiting was needed
to_wait = (self._past_entries[0] - cutoff_time).total_seconds()
time.sleep(to_wait)
self._past_entries.append(datetime.datetime.now())
return to_wait
def __exit__(self, *_):
pass
def __repr__(self):
return "{}(access_limit={}, time_window={})".format(
self.__class__.__name__,
self._access_limit,
repr(self._time_window),
)
class BidiRpc(object):
"""A helper for consuming a bi-directional streaming RPC.
This maps gRPC's built-in interface which uses a request iterator and a
response iterator into a socket-like :func:`send` and :func:`recv`. This
is a more useful pattern for long-running or asymmetric streams (streams
where there is not a direct correlation between the requests and
responses).
Example::
initial_request = example_pb2.StreamingRpcRequest(
setting='example')
rpc = BidiRpc(
stub.StreamingRpc,
initial_request=initial_request,
metadata=[('name', 'value')]
)
rpc.open()
while rpc.is_active():
print(rpc.recv())
rpc.send(example_pb2.StreamingRpcRequest(
data='example'))
This does *not* retry the stream on errors. See :class:`ResumableBidiRpc`.
Args:
start_rpc (grpc.StreamStreamMultiCallable): The gRPC method used to
start the RPC.
initial_request (Union[protobuf.Message,
Callable[None, protobuf.Message]]): The initial request to
yield. This is useful if an initial request is needed to start the
stream.
metadata (Sequence[Tuple(str, str)]): RPC metadata to include in
the request.
"""
def __init__(self, start_rpc, initial_request=None, metadata=None):
self._start_rpc = start_rpc
self._initial_request = initial_request
self._rpc_metadata = metadata
self._request_queue = queue.Queue()
self._request_generator = None
self._is_active = False
self._callbacks = []
self.call = None
def add_done_callback(self, callback):
"""Adds a callback that will be called when the RPC terminates.
This occurs when the RPC errors or is successfully terminated.
Args:
callback (Callable[[grpc.Future], None]): The callback to execute.
It will be provided with the same gRPC future as the underlying
stream which will also be a :class:`grpc.Call`.
"""
self._callbacks.append(callback)
def _on_call_done(self, future):
for callback in self._callbacks:
callback(future)
def open(self):
    """Opens the stream.

    Starts the RPC with a queue-backed request generator and wires up the
    done callbacks. Raises ValueError if the stream is already open.
    """
    if self.is_active:
        raise ValueError("Can not open an already open stream.")

    request_generator = _RequestQueueGenerator(
        self._request_queue, initial_request=self._initial_request
    )
    call = self._start_rpc(iter(request_generator), metadata=self._rpc_metadata)

    # Give the generator a handle on the call so it can stop producing
    # requests once the RPC goes inactive.
    request_generator.call = call

    # TODO: api_core should expose the future interface for wrapped
    # callables as well.
    if hasattr(call, "_wrapped"):  # pragma: NO COVER
        call._wrapped.add_done_callback(self._on_call_done)
    else:
        call.add_done_callback(self._on_call_done)

    self._request_generator = request_generator
    self.call = call

def close(self):
    """Closes the stream."""
    if self.call is None:
        return

    # A queued None tells the request generator to exit cleanly.
    self._request_queue.put(None)
    self.call.cancel()
    self._request_generator = None
    # Don't set self.call to None. Keep it around so that send/recv can
    # raise the error.
def send(self, request):
"""Queue a message to be sent on the stream.
Send is non-blocking.
If the underlying RPC has been closed, this will raise.
Args:
request (protobuf.Message): The request to send.
"""
if self.call is None:
raise ValueError("Can not send() on an RPC that has never been open()ed.")
# Don't use self.is_active(), as ResumableBidiRpc will overload it
# to mean something semantically different.
if self.call.is_active():
self._request_queue.put(request)
else:
# calling next should cause the call to raise.
next(self.call)
def recv(self):
"""Wait for a message to be returned from the stream.
Recv is blocking.
If the underlying RPC has been closed, this will raise.
Returns:
protobuf.Message: The received message.
"""
if self.call is None:
raise ValueError("Can not recv() on an RPC that has never been open()ed.")
return next(self.call)
@property
def is_active(self):
"""bool: True if this stream is currently open and active."""
return self.call is not None and self.call.is_active()
@property
def pending_requests(self):
"""int: Returns an estimate of the number of queued requests."""
return self._request_queue.qsize()
def _never_terminate(future_or_error):
"""By default, no errors cause BiDi termination."""
return False
class ResumableBidiRpc(BidiRpc):
"""A :class:`BidiRpc` that can automatically resume the stream on errors.
It uses the ``should_recover`` arg to determine if it should re-establish
the stream on error.
Example::
def should_recover(exc):
return (
isinstance(exc, grpc.RpcError) and
exc.code() == grpc.StatusCode.UNAVAILABLE)
initial_request = example_pb2.StreamingRpcRequest(
setting='example')
metadata = [('header_name', 'value')]
rpc = ResumableBidiRpc(
stub.StreamingRpc,
should_recover=should_recover,
initial_request=initial_request,
metadata=metadata
)
rpc.open()
while rpc.is_active():
print(rpc.recv())
rpc.send(example_pb2.StreamingRpcRequest(
data='example'))
Args:
start_rpc (grpc.StreamStreamMultiCallable): The gRPC method used to
start the RPC.
initial_request (Union[protobuf.Message,
Callable[None, protobuf.Message]]): The initial request to
yield. This is useful if an initial request is needed to start the
stream.
should_recover (Callable[[Exception], bool]): A function that returns
True if the stream should be recovered. This will be called
whenever an error is encountered on the stream.
should_terminate (Callable[[Exception], bool]): A function that returns
True if the stream should be terminated. This will be called
whenever an error is encountered on the stream.
metadata Sequence[Tuple(str, str)]: RPC metadata to include in
the request.
throttle_reopen (bool): If ``True``, throttling will be applied to
stream reopen calls. Defaults to ``False``.
"""
def __init__(
    self,
    start_rpc,
    should_recover,
    should_terminate=_never_terminate,
    initial_request=None,
    metadata=None,
    throttle_reopen=False,
):
    """See the class docstring for the argument semantics."""
    super(ResumableBidiRpc, self).__init__(start_rpc, initial_request, metadata)
    self._should_recover = should_recover
    self._should_terminate = should_terminate
    self._operational_lock = threading.RLock()
    self._finalized = False
    self._finalize_lock = threading.Lock()

    # Optionally rate-limit re-opens to avoid thrashing when the server
    # drops the stream rapidly.
    self._reopen_throttle = (
        _Throttle(access_limit=5, time_window=datetime.timedelta(seconds=10))
        if throttle_reopen
        else None
    )

def _finalize(self, result):
    # Deliver the terminal result to the registered callbacks exactly once.
    with self._finalize_lock:
        if self._finalized:
            return

        for registered in self._callbacks:
            registered(result)

        self._finalized = True
def _on_call_done(self, future):
    # Unlike the base class, callbacks only fire on a terminal error, not
    # on errors we can recover from. Note that grpc's "future" here is also
    # a grpc.RpcError.
    with self._operational_lock:
        # Short-circuits: _should_recover is only consulted when
        # _should_terminate said no, matching the original elif chain.
        if self._should_terminate(future) or not self._should_recover(future):
            self._finalize(future)
        else:
            _LOGGER.debug("Re-opening stream from gRPC callback.")
            self._reopen()
def _reopen(self):
    """Re-establish the stream after a recoverable failure.

    Called from both the gRPC done-callback thread and _recoverable().
    """
    with self._operational_lock:
        # Another thread already managed to re-open this stream.
        if self.call is not None and self.call.is_active():
            _LOGGER.debug("Stream was already re-established.")
            return

        self.call = None
        # Request generator should exit cleanly since the RPC its bound to
        # has exited.
        self._request_generator = None

        # Note: we do not currently do any sort of backoff here. The
        # assumption is that re-establishing the stream under normal
        # circumstances will happen in intervals greater than 60s.
        # However, it is possible in a degenerative case that the server
        # closes the stream rapidly which would lead to thrashing here,
        # but hopefully in those cases the server would return a non-
        # retryable error.

        try:
            if self._reopen_throttle:
                with self._reopen_throttle:
                    self.open()
            else:
                self.open()
        # If re-opening or re-calling the method fails for any reason,
        # consider it a terminal error and finalize the stream.
        except Exception as exc:
            _LOGGER.debug("Failed to re-open stream due to %s", exc)
            self._finalize(exc)
            raise

        _LOGGER.info("Re-established stream")
def _recoverable(self, method, *args, **kwargs):
    """Wraps a method to recover the stream and retry on error.

    If a retryable error occurs while making the call, then the stream will
    be re-opened and the method will be retried. This happens indefinitely
    so long as the error is a retryable one. If an error occurs while
    re-opening the stream, then this method will raise immediately and
    trigger finalization of this object.

    Args:
        method (Callable[..., Any]): The method to call.
        args: The args to pass to the method.
        kwargs: The kwargs to pass to the method.
    """
    while True:
        try:
            return method(*args, **kwargs)

        except Exception as exc:
            with self._operational_lock:
                _LOGGER.debug("Call to retryable %r caused %s.", method, exc)

                if self._should_terminate(exc):
                    self.close()
                    _LOGGER.debug("Terminating %r due to %s.", method, exc)
                    self._finalize(exc)
                    # Terminal-but-expected: swallow the error and return
                    # None to the caller.
                    break

                if not self._should_recover(exc):
                    self.close()
                    _LOGGER.debug("Not retrying %r due to %s.", method, exc)
                    self._finalize(exc)
                    raise exc

                _LOGGER.debug("Re-opening stream from retryable %r.", method)
                self._reopen()
def _send(self, request):
    """Single send attempt; retried via _recoverable() by send()."""
    # Grab a reference to the RPC call. Because another thread (notably
    # the gRPC error thread) can modify self.call (by invoking reopen),
    # we should ensure our reference can not change underneath us.
    # If self.call is modified (such as replaced with a new RPC call) then
    # this will use the "old" RPC, which should result in the same
    # exception passed into gRPC's error handler being raised here, which
    # will be handled by the usual error handling in retryable.
    with self._operational_lock:
        call = self.call

    if call is None:
        raise ValueError("Can not send() on an RPC that has never been open()ed.")

    # Don't use self.is_active(), as ResumableBidiRpc will overload it
    # to mean something semantically different.
    if call.is_active():
        self._request_queue.put(request)
        # (removed a stray dead `pass` that followed the put() call)
    else:
        # calling next should cause the call to raise.
        next(call)
def | (self, request):
return self._recoverable(self._send, request)
def _recv(self):
    # Snapshot self.call under the lock so a concurrent _reopen() cannot
    # swap it out from underneath us mid-receive.
    with self._operational_lock:
        call = self.call

    if call is None:
        raise ValueError("Can not recv() on an RPC that has never been open()ed.")

    return next(call)

def recv(self):
    """Receive the next message, transparently retrying recoverable errors."""
    return self._recoverable(self._recv)

@property
def is_active(self):
    """bool: True if this stream is currently open and active."""
    # Check under the operational lock: the state may be queried *while*
    # the RPC is being retried, and gRPC can briefly report the call as
    # inactive between a drop and our callback re-opening it. ``_finalized``
    # is therefore the authoritative terminal flag.
    with self._operational_lock:
        return self.call is not None and not self._finalized
class BackgroundConsumer(object):
"""A bi-directional stream consumer that runs in a separate thread.
This maps the consumption of a stream into a callback-based model. It also
provides :func:`pause` and :func:`resume` to allow for flow-control.
Example::
def should_recover(exc):
return (
isinstance(exc, grpc.RpcError) and
exc.code() == grpc.StatusCode.UNAVAILABLE)
initial_request = example_pb2.StreamingRpcRequest(
setting='example')
rpc = ResumableBidiRpc(
stub.StreamingRpc,
initial_request=initial_request,
should_recover=should_recover)
def on_response(response):
print(response)
consumer = BackgroundConsumer(rpc, on_response)
consumer.start()
Note that error handling *must* be done by using the provided
``bidi_rpc``'s ``add_done_callback``. This helper will automatically exit
whenever the RPC itself exits and will not provide any error details.
Args:
bidi_rpc (BidiRpc): The RPC to consume. Should not have been
``open()``ed yet.
on_response (Callable[[protobuf.Message], None]): The callback to
be called for every response on the stream.
"""
def __init__(self, bidi_rpc, on_response):
self._bidi_rpc = bidi_rpc
self._on_response = on_response
self._paused = False
self._wake = threading.Condition()
self._thread = None
self._operational_lock = threading.Lock()
def _on_call_done(self, future):
# Resume the thread if it's paused, this prevents blocking forever
# when the RPC has terminated.
self.resume()
def _thread_main(self, ready):
    """Body of the background consumer thread."""
    try:
        # Signal start() before doing anything that could block, so
        # start() never hangs waiting for this thread.
        ready.set()

        self._bidi_rpc.add_done_callback(self._on_call_done)
        self._bidi_rpc.open()

        while self._bidi_rpc.is_active:
            # Do not allow the paused status to change at all during this
            # section. There is a condition where we could be resumed
            # between checking if we are paused and calling wake.wait(),
            # which means that we will miss the notification to wake up
            # (oops!) and wait for a notification that will never come.
            # Keeping the lock throughout avoids that.
            # In the future, we could use `Condition.wait_for` if we drop
            # Python 2.7.
            with self._wake:
                while self._paused:
                    _LOGGER.debug("paused, waiting for waking.")
                    self._wake.wait()
                    _LOGGER.debug("woken.")

            _LOGGER.debug("waiting for recv.")
            response = self._bidi_rpc.recv()
            _LOGGER.debug("recved response.")
            self._on_response(response)

    except exceptions.GoogleAPICallError as exc:
        # Expected path: RPC cancelled/terminated; surfaced elsewhere.
        _LOGGER.debug(
            "%s caught error %s and will exit. Generally this is due to "
            "the RPC itself being cancelled and the error will be "
            "surfaced to the calling code.",
            _BIDIRECTIONAL_CONSUMER_NAME,
            exc,
            exc_info=True,
        )

    except Exception as exc:
        _LOGGER.exception(
            "%s caught unexpected exception %s and will exit.",
            _BIDIRECTIONAL_CONSUMER_NAME,
            exc,
        )

    _LOGGER.info("%s exiting", _BIDIRECTIONAL_CONSUMER_NAME)
def start(self):
    """Start the background thread and begin consuming the thread."""
    with self._operational_lock:
        ready = threading.Event()
        thread = threading.Thread(
            name=_BIDIRECTIONAL_CONSUMER_NAME,
            target=self._thread_main,
            args=(ready,),
        )
        # Daemonize so a stuck consumer cannot prevent interpreter exit.
        thread.daemon = True
        thread.start()
        # Other parts of the code rely on `thread.is_alive` which
        # isn't sufficient to know if a thread is active, just that it may
        # soon be active. This can cause races. Further protect
        # against races by using a ready event and wait on it to be set.
        ready.wait()
        self._thread = thread
        _LOGGER.debug("Started helper thread %s", thread.name)

def stop(self):
    """Stop consuming the stream and shutdown the background thread."""
    with self._operational_lock:
        self._bidi_rpc.close()

        if self._thread is not None:
            # Resume the thread to wake it up in case it is sleeping.
            self.resume()
            self._thread.join()

        self._thread = None

@property
def is_active(self):
    """bool: True if the background thread is active."""
    return self._thread is not None and self._thread.is_alive()
def pause(self):
"""Pauses the response stream.
This does *not* pause the request stream.
"""
with self._wake:
self._paused = True
def resume(self):
"""Resumes the response stream."""
with self._wake:
self._paused = False
self._wake.notifyAll()
@property
def is_paused(self):
"""bool: True if the response stream is paused."""
return self._paused
| send |
config.py | import transformers
import argparse
def none_or_str(value):
if value == 'None':
return None
return value
def primary_parse():
parser = argparse.ArgumentParser()
parser.add_argument('--level') # {"token" "comment"}
parser.add_argument('--max_len', type=int, default=256)
parser.add_argument('--max_len_context', type=int, default=64)
parser.add_argument('--context', type=none_or_str) # {"parent", "title"}
parser.add_argument('--train_batch_size', type=int, default=8)
parser.add_argument('--valid_batch_size', type=int, default=16)
parser.add_argument('--test_batch_size', type=int, default=16)
parser.add_argument('--epochs', type=int, default=10) | parser.add_argument('--model') # {"bertModel" #mgnModel}
parser.add_argument('--folder') # path to folder with data splits
parser.add_argument('--classes') #{"multi" "binary"}
parser.add_argument('--alpha', type=float, default=0.5)
return parser
def secondary_parse(args):
sec_args = {}
sec_args['training_file'] = f"../data/{args.folder}/train.txt"
sec_args['valid_file'] = f"../data/{args.folder}/dev.txt"
sec_args['test_file'] = f"../data/{args.folder}/test.txt"
sec_args['tokenizer'] = transformers.BertTokenizer.from_pretrained( args.base_model, do_lower_case=True, local_files_only=True)
sec_args['model_path'] = f"{args.level}_{args.base_model}_{args.epochs}_{args.model}_{args.folder}_{args.classes}_{args.context}_{args.seed}.bin"
return sec_args | parser.add_argument('--train_flag', type=int, default=1) # specify 0 to evaluate on test data only
parser.add_argument('--seed', type=int, default=100)
parser.add_argument('--base_model', default='bert-base-uncased') |
ae.py | #!/usr/bin/env python3
###############################################################################
# #
# RMG - Reaction Mechanism Generator #
# #
# Copyright (c) 2002-2021 Prof. William H. Green ([email protected]), #
# Prof. Richard H. West ([email protected]) and the RMG Team ([email protected]) #
# #
# Permission is hereby granted, free of charge, to any person obtaining a #
# copy of this software and associated documentation files (the 'Software'), #
# to deal in the Software without restriction, including without limitation #
# the rights to use, copy, modify, merge, publish, distribute, sublicense, #
# and/or sell copies of the Software, and to permit persons to whom the #
# Software is furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in #
# all copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING #
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER #
# DEALINGS IN THE SOFTWARE. #
# #
###############################################################################
"""
This module provides classes for fitting atom energies based on a very
small, predetermined set of molecules.
"""
import importlib
import json
import logging
from collections import Counter
from typing import Dict, Hashable, List, Union
import numpy as np
from scipy.stats import distributions
from rmgpy import constants
from rmgpy.molecule import get_element, Molecule
import arkane.encorr.data as data
from arkane.encorr.reference import ReferenceDatabase
from arkane.modelchem import LevelOfTheory, CompositeLevelOfTheory
# List of species labels that will be used for fitting (labels should match reference database)
SPECIES_LABELS = [
'Dihydrogen',
'Dinitrogen',
'Dioxygen',
'Disulfur',
'Difluorine',
'Dichlorine',
'Dibromine',
'Hydrogen fluoride',
'Hydrogen chloride',
'Hydrogen bromide',
'Hydrogen sulfide',
'Water',
'Methane',
'Methyl',
'Ammonia',
'Chloromethane'
]
class AEJob:
"""
A job for fitting atom energies.
"""
def __init__(self,
species_energies: Dict[str, float],
level_of_theory: Union[LevelOfTheory, CompositeLevelOfTheory] = None,
write_to_database: bool = False,
overwrite: bool = False):
"""
Initialize an AEJob instance.
Notes:
The species energies should be provided as a dictionary
containing the species labels as keys and their single-
point electronic energies in Hartree as values. The
energies should be calculated using the experimental
geometry provided for the species in the reference
database, and the zero-point energy should not be included
in the electronic energy.
Args:
species_energies: Dictionary of species labels with single-point electronic energies (Hartree).
level_of_theory: Dictionary key for saving atom energies to the database.
write_to_database: Save the fitted atom energies directly to the RMG database.
overwrite: Overwrite atom energies in the RMG database if they already exist.
"""
self.spcs_energies = species_energies
self.level_of_theory = level_of_theory
self.write_to_database = write_to_database
self.overwrite = overwrite
self.ae = AE(species_energies)
def execute(self, output_file: str = None):
"""
Execute the atom energy job.
Args:
output_file: Write the fitted energies to this file.
"""
if self.level_of_theory is None:
logging.info('Fitting atom energies')
else:
logging.info(f'Fitting atom energies for {self.level_of_theory}')
self.ae.fit()
if output_file is not None:
with open(output_file, 'a') as f:
if self.level_of_theory is not None:
f.write(f'# {self.level_of_theory}\n')
for element, energy in self.ae.atom_energies.items():
f.write(f'# {element:2}: {energy:15.8f} +/- {self.ae.confidence_intervals[element]:.8f} Hartree\n')
f.writelines(self.ae.format_atom_energies(
'atom_energies' if self.level_of_theory is None else self.level_of_theory))
if self.write_to_database:
if self.level_of_theory is None:
raise Exception('Level of theory is required for writing to database')
try:
self.ae.write_to_database(self.level_of_theory, overwrite=self.overwrite)
except ValueError as e:
logging.warning('Could not write atom energies to database. Captured error:')
logging.warning(str(e))
class AE:
"""
A class for fitting atom energies.
"""
ref_data_src = 'CCCBDB' # Use CCCBDB data
ref_data = None # Dictionary of reference data entries
def __init__(self, species_energies: Dict[str, float]):
self.species_energies = species_energies # Hartree
self.atom_energies = None
self.confidence_intervals = None
for lbl in SPECIES_LABELS:
if lbl not in self.species_energies:
logging.warning(f'{lbl} missing from provided species energies!')
@classmethod
def _load_refdata(cls):
if cls.ref_data is None:
logging.info('Loading reference database')
db = ReferenceDatabase()
db.load()
cls.ref_data = {lbl: spc for lbl, spc in zip(SPECIES_LABELS, db.get_species_from_label(SPECIES_LABELS))}
def fit(self):
"""
Fit atom energies using the provided species energies and
corresponding atomization energies from the reference data.
"""
self._load_refdata()
mols = [
Molecule().from_adjacency_list(
self.ref_data[lbl].adjacency_list,
raise_atomtype_exception=False,
raise_charge_exception=False
) for lbl in self.species_energies
]
atom_counts = [Counter(atom.element.symbol for atom in mol.atoms) for mol in mols]
elements = sorted({element for ac in atom_counts for element in ac}, key=lambda s: get_element(s).number)
x = np.array([[ac[element] for element in elements] for ac in atom_counts]) # Nmols x Nelements
atomization_energies = np.array([
self.ref_data[lbl].reference_data[self.ref_data_src].atomization_energy.value_si
/ constants.E_h / constants.Na for lbl in self.species_energies
])
zpes = np.array([
self.ref_data[lbl].reference_data[self.ref_data_src].zpe.value_si
/ constants.E_h / constants.Na for lbl in self.species_energies
])
elec_energies = np.array(list(self.species_energies.values())) # Should already be in Hartree
y = atomization_energies + elec_energies + zpes
w = np.linalg.solve(x.T @ x, x.T @ y)
self.atom_energies = dict(zip(elements, w))
# Get confidence intervals
n = len(y) # Ndata
k = len(w) # Nparam
ypred = x @ w
sigma2 = np.sum((y - ypred)**2) / (n - k - 1) # MSE
cov = sigma2 * np.linalg.inv(x.T @ x) # covariance matrix
se = np.sqrt(np.diag(cov)) # standard error
alpha = 0.05 # 95% confidence level
tdist = distributions.t.ppf(1 - alpha/2, n - k - 1) # student-t
ci = tdist * se # confidence interval half-width
self.confidence_intervals = dict(zip(elements, ci)) # Parameter estimates are w +/- ci
def write_to_database(self, key: Hashable, overwrite: bool = False, alternate_path: str = None):
"""
Write atom energies to database.
Args:
key: Dictionary key to use for atom energies in database.
overwrite: Overwrite existing atom energies.
alternate_path: Write atom energies and existing database to this path instead.
"""
if self.atom_energies is None:
raise ValueError('No atom energies available for writing')
data_path = data.quantum_corrections_path
with open(data_path) as f:
lines = f.readlines()
ae_formatted = self.format_atom_energies(key, indent=True)
# Add new atom energies to file without changing existing formatting
for i, line in enumerate(lines):
if 'atom_energies' in line:
if key in data.atom_energies:
if overwrite:
# Does not overwrite comments
del_idx_start = del_idx_end = None
for j, line2 in enumerate(lines[i:]):
if repr(key) in line2:
del_idx_start = i + j
del_idx_end = None
elif line2.rstrip() == ' },': # Can't have a comment after final brace
del_idx_end = i + j + 1
if del_idx_start is not None and del_idx_end is not None:
if (lines[del_idx_start - 1].lstrip().startswith('#')
or lines[del_idx_end + 1].lstrip().startswith('#')):
logging.warning('There may be left over comments from previous atom energies')
lines[del_idx_start:del_idx_end] = ae_formatted
break
else:
raise ValueError(f'{key} already exists. Set `overwrite` to True.')
else:
lines[(i+1):(i+1)] = ['\n'] + ae_formatted
break
with open(data_path if alternate_path is None else alternate_path, 'w') as f:
f.writelines(lines)
# Reload data to update atom energy dictionary
if alternate_path is None:
importlib.reload(data)
def format_atom_energies(self, key: Hashable, indent: bool = False) -> List[str]:
"""
Obtain a list of nicely formatted atom energies suitable for
writelines.
Args:
key: Dictionary key to use for formatting dictionary.
indent: Indent each line.
Returns:
Formatted list of atom energies.
"""
ae_formatted = json.dumps(self.atom_energies, indent=4).replace('"', "'").split('\n') | ae_formatted = [' ' + e for e in ae_formatted]
return ae_formatted | ae_formatted[0] = f'"{key}": ' + ae_formatted[0]
ae_formatted[-1] += ','
ae_formatted = [e + '\n' for e in ae_formatted]
if indent: |
switch-manage-routing.module.ts | import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { StatisticalReportResolver } from './statistical-report/statistical-report-resolver.service';
import { SwitchManageComponent } from './switch-manage.component';
import { SwicthSettingResolver } from './switch-setting/swicth-setting-resolver.service';
import { TargetConditionsResolver } from './target-conditions/target-conditions-resolver.service';
const routes: Routes = [
{
path: '',
data: {
breadcrumb: '开关管理'
},
component: SwitchManageComponent,
children: [
{
path: '',
loadChildren: () => import("./switch-index/switch-index.module").then(m => m.SwitchIndexModule)
}, {
path: 'setting/:id',
resolve: { switchInfo: SwicthSettingResolver },
loadChildren: () => import("./switch-setting/switch-setting.module").then(m => m.SwitchSettingModule),
data: {
breadcrumb: '开关详情'
}
}, {
path: 'report/:id',
resolve: { switchInfo: StatisticalReportResolver },
loadChildren: () => import("./statistical-report/statistical-report.module").then(m => m.StatisticalReportModule),
data: {
breadcrumb: '开关详情'
}
}, {
path: 'condition/:id',
resolve: { switchInfo: TargetConditionsResolver },
loadChildren: () => import("./target-conditions/target-conditions.module").then(m => m.TargetConditionsModule),
data: {
breadcrumb: '开关详情'
}
}, {
path: 'triggers/:id',
resolve: { switchInfo: SwicthSettingResolver },
loadChildren: () => import("./flag-triggers/flag-triggers.module").then(m => m.FlagTriggersModule),
data: {
breadcrumb: '开关详情'
}
}, {
path: 'experimentations/:id',
resolve: { switchInfo: SwicthSettingResolver },
loadChildren: () => import("./experimentation/experimentation.module").then(m => m.ExperimentationModule),
data: {
breadcrumb: '开关详情'
}
}, {
path: '',
redirectTo: '/switch-manage'
}
]
}
];
@NgModule({
imports: [RouterModule.forChild(routes)],
exports: [RouterModule],
providers: [
SwicthSettingResolver,
TargetConditionsResolver,
StatisticalReportResolver
]
})
export class SwitchManageRoutingModule { }
| ||
app.module.ts | import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { NgModule } from '@angular/core';
import { FormBuilder, FormsModule, ReactiveFormsModule } from '@angular/forms';
import { HttpClientModule } from '@angular/common/http';
import { MatSnackBarModule } from '@angular/material';
import { BrowserModule } from '@angular/platform-browser';
import { RouterModule } from '@angular/router';
import { AppComponent } from './app.component';
import { NavComponent } from './components/nav/nav.component';
import { HomeComponent } from './components/home/home.component';
import { LoginComponent } from './components/login/login.component';
import { RegisterComponent } from './components/register/register.component';
import { WorkoutDetailComponent } from './components/workout-detail/workout-detail.component';
import { WorkoutsComponent } from './components/workouts/workouts.component';
import { NewWorkoutDetailComponent } from './components/new-workout-detail/new-workout-detail';
import { NewExerciseDetailComponent } from './components/new-exercise-detail/new-exercise-detail.component';
import { NewExercisesComponent } from './components/new-exercise/new-exercise.component';
import { TrainersComponent } from './components/trainers/trainers.component';
import { WebService } from './services/web.service';
import { AuthService } from './services/auth.service';
import { AppRoutingModule } from './app.routes';
import { AuthGuardService } from './services/auth-guard.service';
import { TrainerStoreService } from './services/trainer-store.service';
import { UserComponent } from './components/user/user.component';
@NgModule( {
declarations: [
AppComponent, NavComponent, HomeComponent, LoginComponent, RegisterComponent,
WorkoutsComponent, WorkoutDetailComponent, NewWorkoutDetailComponent,
NewExerciseDetailComponent, NewExercisesComponent, TrainersComponent, UserComponent
],
imports: [
BrowserModule, BrowserAnimationsModule, MatSnackBarModule, AppRoutingModule,
HttpClientModule, RouterModule, FormsModule, ReactiveFormsModule
],
providers: [AuthService, WebService, FormBuilder, AuthGuardService, TrainerStoreService],
bootstrap: [AppComponent]
} )
export class | {}
| AppModule |
mnist.py | from keras.datasets import mnist
from keras.utils import np_utils
import sys, os
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from models.carlini_models import carlini_mnist_model
from models.cleverhans_models import cleverhans_mnist_model
from models.pgdtrained_models import pgdtrained_mnist_model
class MNISTDataset:
def | (self):
self.dataset_name = "MNIST"
self.image_size = 28
self.num_channels = 1
self.num_classes = 10
def get_test_dataset(self):
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_test = X_test.reshape(X_test.shape[0], self.image_size, self.image_size, self.num_channels)
X_test = X_test.astype('float32')
X_test /= 255
Y_test = np_utils.to_categorical(y_test, self.num_classes)
del X_train, y_train
return X_test, Y_test
def get_val_dataset(self):
(X_train, y_train), (X_test, y_test) = mnist.load_data()
val_size = 5000
X_val = X_train[:val_size]
X_val = X_val.reshape(X_val.shape[0], self.image_size, self.image_size, self.num_channels)
X_val = X_val.astype('float32') / 255
y_val = y_train[:val_size]
Y_val = np_utils.to_categorical(y_val, self.num_classes)
del X_train, y_train, X_test, y_test
return X_val, Y_val
def load_model_by_name(self, model_name, logits=False, input_range_type=1, pre_filter=lambda x:x):
"""
:params logits: return logits(input of softmax layer) if True; return softmax output otherwise.
:params input_range_type: {1: [0,1], 2:[-0.5, 0.5], 3:[-1, 1]...}
"""
if model_name not in ["cleverhans", 'cleverhans_adv_trained', 'carlini', 'pgdtrained', 'pgdbase']:
raise NotImplementedError("Undefined model [%s] for %s." % (model_name, self.dataset_name))
self.model_name = model_name
model_weights_fpath = "%s_%s.keras_weights.h5" % (self.dataset_name, model_name)
model_weights_fpath = os.path.join('downloads/trained_models', model_weights_fpath)
# self.maybe_download_model()
if model_name in ["cleverhans", 'cleverhans_adv_trained']:
model = cleverhans_mnist_model(logits=logits, input_range_type=input_range_type, pre_filter=pre_filter)
elif model_name in ['carlini']:
model = carlini_mnist_model(logits=logits, input_range_type = input_range_type, pre_filter=pre_filter)
elif model_name in ['pgdtrained', 'pgdbase']:
model = pgdtrained_mnist_model(logits=logits, input_range_type = input_range_type, pre_filter=pre_filter)
print("\n===Defined TensorFlow model graph.")
model.load_weights(model_weights_fpath)
print ("---Loaded MNIST-%s model.\n" % model_name)
return model
if __name__ == '__main__':
# from datasets.mnist import *
dataset = MNISTDataset()
X_test, Y_test = dataset.get_test_dataset()
print (X_test.shape)
print (Y_test.shape)
model_name = 'cleverhans'
model = dataset.load_model_by_name(model_name)
model.compile(loss='categorical_crossentropy',optimizer='sgd', metrics=['acc'])
_,accuracy = model.evaluate(X_test, Y_test, batch_size=128)
print ("\nTesting accuracy: %.4f" % accuracy)
| __init__ |
alt_src_data_end_ptr_ch7.rs | #[doc = "Register `ALT_SRC_DATA_END_PTR_CH7` reader"]
pub struct R(crate::R<ALT_SRC_DATA_END_PTR_CH7_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<ALT_SRC_DATA_END_PTR_CH7_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<ALT_SRC_DATA_END_PTR_CH7_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<ALT_SRC_DATA_END_PTR_CH7_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `ALT_SRC_DATA_END_PTR_CH7` writer"]
pub struct W(crate::W<ALT_SRC_DATA_END_PTR_CH7_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<ALT_SRC_DATA_END_PTR_CH7_SPEC>;
#[inline(always)]
fn | (&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<ALT_SRC_DATA_END_PTR_CH7_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<ALT_SRC_DATA_END_PTR_CH7_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `ALT_SRC_DATA_END_PTR_CH7` reader - Alternate pointer to the end address of the source data of channel 7"]
pub struct ALT_SRC_DATA_END_PTR_CH7_R(crate::FieldReader<u32, u32>);
impl ALT_SRC_DATA_END_PTR_CH7_R {
#[inline(always)]
pub(crate) fn new(bits: u32) -> Self {
ALT_SRC_DATA_END_PTR_CH7_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for ALT_SRC_DATA_END_PTR_CH7_R {
type Target = crate::FieldReader<u32, u32>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `ALT_SRC_DATA_END_PTR_CH7` writer - Alternate pointer to the end address of the source data of channel 7"]
pub struct ALT_SRC_DATA_END_PTR_CH7_W<'a> {
w: &'a mut W,
}
impl<'a> ALT_SRC_DATA_END_PTR_CH7_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
self.w.bits = value as u32;
self.w
}
}
impl R {
#[doc = "Bits 0:31 - Alternate pointer to the end address of the source data of channel 7"]
#[inline(always)]
pub fn alt_src_data_end_ptr_ch7(&self) -> ALT_SRC_DATA_END_PTR_CH7_R {
ALT_SRC_DATA_END_PTR_CH7_R::new(self.bits as u32)
}
}
impl W {
#[doc = "Bits 0:31 - Alternate pointer to the end address of the source data of channel 7"]
#[inline(always)]
pub fn alt_src_data_end_ptr_ch7(&mut self) -> ALT_SRC_DATA_END_PTR_CH7_W {
ALT_SRC_DATA_END_PTR_CH7_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Alternate pointer to the end address of the source data of channel 7\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [alt_src_data_end_ptr_ch7](index.html) module"]
pub struct ALT_SRC_DATA_END_PTR_CH7_SPEC;
impl crate::RegisterSpec for ALT_SRC_DATA_END_PTR_CH7_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [alt_src_data_end_ptr_ch7::R](R) reader structure"]
impl crate::Readable for ALT_SRC_DATA_END_PTR_CH7_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [alt_src_data_end_ptr_ch7::W](W) writer structure"]
impl crate::Writable for ALT_SRC_DATA_END_PTR_CH7_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets ALT_SRC_DATA_END_PTR_CH7 to value 0"]
impl crate::Resettable for ALT_SRC_DATA_END_PTR_CH7_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| deref |
indicator_search_v1_parameters.go | // Code generated by go-swagger; DO NOT EDIT.
package ioc
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"net/http"
"time"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
cr "github.com/go-openapi/runtime/client"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
) |
// NewIndicatorSearchV1Params creates a new IndicatorSearchV1Params object,
// with the default timeout for this client.
//
// Default values are not hydrated, since defaults are normally applied by the API server side.
//
// To enforce default values in parameter, use SetDefaults or WithDefaults.
func NewIndicatorSearchV1Params() *IndicatorSearchV1Params {
return &IndicatorSearchV1Params{
timeout: cr.DefaultTimeout,
}
}
// NewIndicatorSearchV1ParamsWithTimeout creates a new IndicatorSearchV1Params object
// with the ability to set a timeout on a request.
func NewIndicatorSearchV1ParamsWithTimeout(timeout time.Duration) *IndicatorSearchV1Params {
return &IndicatorSearchV1Params{
timeout: timeout,
}
}
// NewIndicatorSearchV1ParamsWithContext creates a new IndicatorSearchV1Params object
// with the ability to set a context for a request.
func NewIndicatorSearchV1ParamsWithContext(ctx context.Context) *IndicatorSearchV1Params {
return &IndicatorSearchV1Params{
Context: ctx,
}
}
// NewIndicatorSearchV1ParamsWithHTTPClient creates a new IndicatorSearchV1Params object
// with the ability to set a custom HTTPClient for a request.
func NewIndicatorSearchV1ParamsWithHTTPClient(client *http.Client) *IndicatorSearchV1Params {
return &IndicatorSearchV1Params{
HTTPClient: client,
}
}
/* IndicatorSearchV1Params contains all the parameters to send to the API endpoint
for the indicator search v1 operation.
Typically these are written to a http.Request.
*/
type IndicatorSearchV1Params struct {
/* Filter.
The filter expression that should be used to limit the results.
*/
Filter *string
/* Limit.
The maximum records to return.
*/
Limit *int64
/* Offset.
The offset to start retrieving records from. Offset and After params are mutually exclusive. If none provided then scrolling will be used by default.
*/
Offset *int64
/* Sort.
The sort expression that should be used to sort the results.
*/
Sort *string
timeout time.Duration
Context context.Context
HTTPClient *http.Client
}
// WithDefaults hydrates default values in the indicator search v1 params (not the query body).
//
// All values with no default are reset to their zero value.
func (o *IndicatorSearchV1Params) WithDefaults() *IndicatorSearchV1Params {
o.SetDefaults()
return o
}
// SetDefaults hydrates default values in the indicator search v1 params (not the query body).
//
// All values with no default are reset to their zero value.
func (o *IndicatorSearchV1Params) SetDefaults() {
// no default values defined for this parameter
}
// WithTimeout adds the timeout to the indicator search v1 params
func (o *IndicatorSearchV1Params) WithTimeout(timeout time.Duration) *IndicatorSearchV1Params {
o.SetTimeout(timeout)
return o
}
// SetTimeout adds the timeout to the indicator search v1 params
func (o *IndicatorSearchV1Params) SetTimeout(timeout time.Duration) {
o.timeout = timeout
}
// WithContext adds the context to the indicator search v1 params
func (o *IndicatorSearchV1Params) WithContext(ctx context.Context) *IndicatorSearchV1Params {
o.SetContext(ctx)
return o
}
// SetContext adds the context to the indicator search v1 params
func (o *IndicatorSearchV1Params) SetContext(ctx context.Context) {
o.Context = ctx
}
// WithHTTPClient adds the HTTPClient to the indicator search v1 params
func (o *IndicatorSearchV1Params) WithHTTPClient(client *http.Client) *IndicatorSearchV1Params {
o.SetHTTPClient(client)
return o
}
// SetHTTPClient adds the HTTPClient to the indicator search v1 params
func (o *IndicatorSearchV1Params) SetHTTPClient(client *http.Client) {
o.HTTPClient = client
}
// WithFilter adds the filter to the indicator search v1 params
func (o *IndicatorSearchV1Params) WithFilter(filter *string) *IndicatorSearchV1Params {
o.SetFilter(filter)
return o
}
// SetFilter adds the filter to the indicator search v1 params
func (o *IndicatorSearchV1Params) SetFilter(filter *string) {
o.Filter = filter
}
// WithLimit adds the limit to the indicator search v1 params
func (o *IndicatorSearchV1Params) WithLimit(limit *int64) *IndicatorSearchV1Params {
o.SetLimit(limit)
return o
}
// SetLimit adds the limit to the indicator search v1 params
func (o *IndicatorSearchV1Params) SetLimit(limit *int64) {
o.Limit = limit
}
// WithOffset adds the offset to the indicator search v1 params
func (o *IndicatorSearchV1Params) WithOffset(offset *int64) *IndicatorSearchV1Params {
o.SetOffset(offset)
return o
}
// SetOffset adds the offset to the indicator search v1 params
func (o *IndicatorSearchV1Params) SetOffset(offset *int64) {
o.Offset = offset
}
// WithSort adds the sort to the indicator search v1 params
func (o *IndicatorSearchV1Params) WithSort(sort *string) *IndicatorSearchV1Params {
o.SetSort(sort)
return o
}
// SetSort adds the sort to the indicator search v1 params
func (o *IndicatorSearchV1Params) SetSort(sort *string) {
o.Sort = sort
}
// WriteToRequest writes these params to a swagger request
func (o *IndicatorSearchV1Params) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
if err := r.SetTimeout(o.timeout); err != nil {
return err
}
var res []error
if o.Filter != nil {
// query param filter
var qrFilter string
if o.Filter != nil {
qrFilter = *o.Filter
}
qFilter := qrFilter
if qFilter != "" {
if err := r.SetQueryParam("filter", qFilter); err != nil {
return err
}
}
}
if o.Limit != nil {
// query param limit
var qrLimit int64
if o.Limit != nil {
qrLimit = *o.Limit
}
qLimit := swag.FormatInt64(qrLimit)
if qLimit != "" {
if err := r.SetQueryParam("limit", qLimit); err != nil {
return err
}
}
}
if o.Offset != nil {
// query param offset
var qrOffset int64
if o.Offset != nil {
qrOffset = *o.Offset
}
qOffset := swag.FormatInt64(qrOffset)
if qOffset != "" {
if err := r.SetQueryParam("offset", qOffset); err != nil {
return err
}
}
}
if o.Sort != nil {
// query param sort
var qrSort string
if o.Sort != nil {
qrSort = *o.Sort
}
qSort := qrSort
if qSort != "" {
if err := r.SetQueryParam("sort", qSort); err != nil {
return err
}
}
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
} | |
people-list.component.ts | import { Observable } from 'rxjs/Rx';
import { Response } from '@angular/http';
import { PeopleService } from './people.service'; | import { Component } from '@angular/core';
import {Person} from './person';
@Component({
selector: 'people-list',
template: `
<section *ngIf="isLoading && !errorMessage">
Loading our hyperdrives!!! Retrieving data...
</section>
<section>
<ul>
<li *ngFor="let person of people">
<a href="#" [routerLink]="['/persons', person.id]">
{{person.name}}
</a>
</li>
</ul>
</section>
<section *ngIf="errorMessage">
{{errorMessage}}
</section>
`
})
export class PeopleListComponent {
people: Person[] = [];
errorMessage: string = '';
isLoading: boolean = true;
constructor(private peopleService: PeopleService) {
}
ngOnInit() {
this.peopleService
.getAll()
.subscribe(
p => this.people = p,
e => this.errorMessage = e,
() => this.isLoading = false);
/*
You can also use the below pattern to load the person in an async way.
1. Change people from people: Person[] = []; to people: Observable<Person[]>;
2. Replace the ngOnInit code to this this.people = this.peopleService.getAll();
3. Add the 'async' pipe into the *ngFor like: <li *ngFor="let person of people | async">
*/
}
} | |
test_datatables.py | # -*- coding: utf-8 -*-
from inspect import isgenerator
from django.apps import apps
from .testcase import DatatableViewTestCase
from datatableview.exceptions import ColumnError
from datatableview.datatables import Datatable, ValuesDatatable
from datatableview.views import DatatableJSONResponseMixin, DatatableView
from datatableview.columns import TextColumn, Column, BooleanColumn
ExampleModel = apps.get_model('test_app', 'ExampleModel')
RelatedModel = apps.get_model('test_app', 'RelatedModel')
class DatatableTests(DatatableViewTestCase):
def test_normalize_config(self):
dt = Datatable([], '/')
dt.configure()
self.assertEqual(dt.config['hidden_columns'], [])
self.assertEqual(dt.config['search_fields'], [])
self.assertEqual(dt.config['unsortable_columns'], [])
self.assertEqual(dt.config['search'], set())
self.assertEqual(dt.config['start_offset'], 0)
self.assertEqual(dt.config['page_length'], 25)
self.assertEqual(dt.config['ordering'], None)
def test_column_names_list_raises_unknown_columns(self):
class DT(Datatable):
class Meta:
model = ExampleModel
columns = ['fake']
dt = DT([], '/')
with self.assertRaises(ColumnError) as cm:
dt.configure()
self.assertEqual(str(cm.exception), "Unknown column name(s): ('fake',)")
def test_column_names_list_finds_local_fields(self):
class DT(Datatable):
class Meta:
model = ExampleModel
columns = ['name']
class NoError(BaseException):
pass
with self.assertRaises(NoError):
dt = DT([], '/')
raise NoError()
def test_column_names_list_raises_related_columns(self):
# This was the old way of including related data, but this is no longer supported
class DT(Datatable):
class Meta:
model = ExampleModel
columns = ['related__name']
dt = DT([], '/')
with self.assertRaises(ColumnError) as cm:
dt.configure()
self.assertEqual(str(cm.exception), "Unknown column name(s): ('related__name',)")
def test_column_names_list_finds_related_fields(self):
class DT(Datatable):
related = TextColumn('Related', ['related__name'])
class Meta:
model = ExampleModel
columns = ['name', 'related']
class NoError(BaseException):
pass
with self.assertRaises(NoError):
dt = DT([], '/')
raise NoError()
def test_get_ordering_splits(self):
# Verify empty has blank db-backed list and virtual list
dt = Datatable([], '/')
dt.configure()
self.assertEqual(dt.get_ordering_splits(), ([], []))
class DT(Datatable):
fake = TextColumn('Fake', sources=['get_absolute_url'])
class Meta:
model = ExampleModel
columns = ['name', 'fake']
# Verify a fake field name ends up separated from the db-backed field
dt = DT([], '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'asc'}) # iSortingCols': '1',
dt.configure()
self.assertEqual(dt.get_ordering_splits(), (['name'], []))
# Verify ['name', 'fake'] ordering sends 'name' to db sort list, but keeps 'fake' in manual
# sort list.
dt = DT([], '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'asc', 'order[1][column]': '1', 'order[1][dir]': 'asc'}) # 'iSortingCols': '2',
dt.configure()
self.assertEqual(dt.get_ordering_splits(), (['name'], ['fake']))
# Verify a fake field name as the sort column correctly finds no db sort fields
dt = DT([], '/', query_config={'order[0][column]': '1', 'order[0][dir]': 'asc'}) # 'iSortingCols': '1',
dt.configure()
self.assertEqual(dt.get_ordering_splits(), ([], ['fake']))
# Verify ['fake', 'name'] ordering sends both fields to manual sort list
dt = DT([], '/', query_config={'order[0][column]': '1', 'order[0][dir]': 'asc', 'order[1][column]': '0', 'order[1][dir]': 'asc'}) # 'iSortingCols': '2',
dt.configure()
self.assertEqual(dt.get_ordering_splits(), ([], ['fake', 'name']))
def test_get_records_populates_cache(self):
ExampleModel.objects.create(name='test name')
queryset = ExampleModel.objects.all()
dt = Datatable(queryset, '/')
dt.get_records()
self.assertIsNotNone(dt._records)
records = dt._records
# _records doesn't change when run again
dt.get_records()
self.assertEqual(dt._records, records)
def test_populate_records_searches(self):
obj1 = ExampleModel.objects.create(name='test name 1', value=False)
obj2 = ExampleModel.objects.create(name='test name 2', value=True)
queryset = ExampleModel.objects.all()
class DT(Datatable):
class Meta:
model = ExampleModel
columns = ['name', 'value']
dt = DT(queryset, '/')
# Sanity check for correct initial queryset
dt.populate_records()
self.assertIsNotNone(dt._records)
self.assertEqual(list(dt._records), list(queryset))
# Verify a search eliminates items from _records
dt = DT(queryset, '/', query_config={'search[value]': 'test name 1'})
dt.populate_records()
self.assertIsNotNone(dt._records)
self.assertEqual(list(dt._records), [obj1])
def test_populate_records_sorts(self):
obj1 = ExampleModel.objects.create(name='test name 1')
obj2 = ExampleModel.objects.create(name='test name 2')
queryset = ExampleModel.objects.all()
class DT(Datatable):
class Meta:
model = ExampleModel
columns = ['name']
dt = DT(queryset, '/')
# Sanity check for correct initial queryset
dt.populate_records()
self.assertIsNotNone(dt._records)
self.assertEqual(list(dt._records), list(queryset))
# Verify a sort changes the ordering of the records list
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'desc'}) # # 'iSortingCols': '1',
dt.populate_records()
self.assertIsNotNone(dt._records)
self.assertEqual(list(dt._records), [obj2, obj1])
def test_populate_records_avoids_column_callbacks(self):
obj1 = ExampleModel.objects.create(name='test name 1')
queryset = ExampleModel.objects.all()
class DT(Datatable):
def preload_record_data(self, obj):
raise Exception("Don't run this")
dt = DT(queryset, '/')
try:
dt.populate_records()
except Exception as e:
if str(e) == "Don't run this":
raise AssertionError('Per-row callbacks being executed!')
raise
def test_preload_record_data_calls_view(self):
obj1 = ExampleModel.objects.create(name='test name 1')
queryset = ExampleModel.objects.all()
class Dummy(object):
def preload_record_data(self, obj):
raise Exception('We did it')
dt = Datatable(queryset, '/', callback_target=Dummy())
with self.assertRaises(Exception) as cm:
dt.get_records()
self.assertEqual(str(cm.exception), 'We did it')
def | (self):
# Defined so that 'pk' order != 'name' order
obj1 = ExampleModel.objects.create(name='b')
obj2 = ExampleModel.objects.create(name='a')
queryset = ExampleModel.objects.all()
class DT(Datatable):
name = TextColumn('Name', sources=['name'])
class Meta:
model = ExampleModel
columns = ['name']
ordering = ['name']
dt = DT(queryset, '/')
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['name'], []))
self.assertEqual(list(dt._records), [obj2, obj1])
# this is to keep DatatableView class from overriding the Meta ordering in Datatable
class DTV(DatatableView):
datatable_class = DT
model = ExampleModel
dtv = DTV().get_datatable(url='/')
self.assertIn('<th data-name="name" data-config-sortable="true" data-config-sorting="0,0,asc" data-config-visible="true">Name</th>', dtv.__str__())
class DT(Datatable):
name = TextColumn('Name', sources=['name'])
class Meta:
model = ExampleModel
columns = ['name']
ordering = ['-name']
dt = DT(queryset, '/')
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['-name'], []))
self.assertEqual(list(dt._records), [obj1, obj2])
def test_sort_prioritizes_db_source(self):
# Defined so that 'pk' order != 'name' order
obj1 = ExampleModel.objects.create(name='test name 2')
obj2 = ExampleModel.objects.create(name='test name 1')
queryset = ExampleModel.objects.all()
class DT(Datatable):
name = TextColumn('Name', sources=['name'])
class Meta:
model = ExampleModel
columns = ['name']
ordering = ['pk']
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'asc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['name'], []))
self.assertEqual(list(dt._records), [obj2, obj1])
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'desc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['-name'], []))
self.assertEqual(list(dt._records), [obj1, obj2])
def test_sort_uses_all_sources(self):
from datetime import timedelta
obj1 = ExampleModel.objects.create(name='a')
obj2 = ExampleModel.objects.create(name='a')
obj3 = ExampleModel.objects.create(name='b')
obj1.date_created = obj1.date_created + timedelta(days=3)
obj2.date_created = obj2.date_created + timedelta(days=1)
obj3.date_created = obj3.date_created + timedelta(days=2)
obj1.save()
obj2.save()
obj3.save()
queryset = ExampleModel.objects.all()
class DT(Datatable):
my_column = TextColumn('Data', sources=['name', 'date_created', 'pk'])
class Meta:
model = ExampleModel
columns = ['my_column']
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'asc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['my_column'], []))
self.assertEqual(list(dt._records), [obj2, obj1, obj3])
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'desc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['-my_column'], []))
self.assertEqual(list(dt._records), [obj3, obj1, obj2])
# Swap the order of 'date_created' and 'name' fields in the sources, which will alter the
# sort results.
class DT(Datatable):
my_column = TextColumn('Data', sources=['date_created', 'name', 'pk'])
class Meta:
model = ExampleModel
columns = ['my_column']
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'asc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['my_column'], []))
self.assertEqual(list(dt._records), [obj2, obj3, obj1])
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'desc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['-my_column'], []))
self.assertEqual(list(dt._records), [obj1, obj3, obj2])
def test_sort_ignores_virtual_sources_when_mixed(self):
from datetime import timedelta
obj1 = ExampleModel.objects.create(name='a')
obj2 = ExampleModel.objects.create(name='b')
obj3 = ExampleModel.objects.create(name='a')
queryset = ExampleModel.objects.all()
class DT(Datatable):
my_column = TextColumn('Data', sources=['name', 'get_absolute_url'])
class Meta:
model = ExampleModel
columns = ['my_column']
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'asc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['my_column'], []))
self.assertEqual(list(dt._records), [obj1, obj3, obj2])
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'desc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['-my_column'], []))
self.assertEqual(list(dt._records), [obj2, obj1, obj3]) # pk is natural ordering 1,3 here
# Swap the sources order, but we expect the same result
class DT(Datatable):
my_column = TextColumn('Data', sources=['get_absolute_url', 'name'], processor='get_data')
class Meta:
model = ExampleModel
columns = ['my_column']
def get_data(self, obj, **kwargs):
# Return data that would make the sort order wrong if it were consulted for sorting
return obj.pk # tracks with get_absolute_url
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'asc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(list(dt._records), [obj1, obj3, obj2])
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'desc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(list(dt._records), [obj2, obj1, obj3]) # pk is natural ordering 1,3 here
def test_sort_uses_virtual_sources_when_no_db_sources_available(self):
from datetime import timedelta
obj1 = ExampleModel.objects.create(name='a')
obj2 = ExampleModel.objects.create(name='b')
obj3 = ExampleModel.objects.create(name='c')
queryset = ExampleModel.objects.all()
class DT(Datatable):
pk = TextColumn('Data', sources=['get_negative_pk'])
class Meta:
model = ExampleModel
columns = ['pk']
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'asc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), ([], ['pk']))
self.assertEqual(list(dt._records), [obj3, obj2, obj1])
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'desc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), ([], ['-pk']))
self.assertEqual(list(dt._records), [obj1, obj2, obj3])
def test_get_object_pk(self):
obj1 = ExampleModel.objects.create(name='test name 1')
queryset = ExampleModel.objects.all()
dt = Datatable(queryset, '/')
self.assertEqual(dt.get_object_pk(obj1), obj1.pk)
def test_get_extra_record_data_passes_through_to_object_serialization(self):
obj1 = ExampleModel.objects.create(name='test name 1')
queryset = ExampleModel.objects.all()
class DT(Datatable):
def get_extra_record_data(self, obj):
return {'custom': 'data'}
dt = DT([], '/')
data = dt.get_record_data(obj1)
self.assertIn('_extra_data', data)
self.assertIn('custom', data['_extra_data'])
self.assertEqual(data['_extra_data']['custom'], 'data')
def test_get_extra_record_data_passes_through_to_json_response(self):
obj1 = ExampleModel.objects.create(name='test name 1')
queryset = ExampleModel.objects.all()
class DT(Datatable):
def get_extra_record_data(self, obj):
return {'custom': 'data'}
class FakeRequest(object):
method = 'GET'
GET = {'sEcho': 0}
dt = DT(queryset, '/')
view = DatatableJSONResponseMixin()
view.request = FakeRequest()
data = view.get_json_response_object(dt)
self.assertIn('data', data)
self.assertIn('DT_RowData', data['data'][0])
self.assertEqual(data['data'][0]['DT_RowData'], {'custom': 'data'})
def test_get_column_value_forwards_to_column_class(self):
class CustomColumn1(Column):
def value(self, obj, **kwargs):
return 'first'
class CustomColumn2(Column):
def value(self, obj, **kwargs):
return 'second'
class DT(Datatable):
fake1 = CustomColumn1('Fake1', sources=['get_absolute_url'])
fake2 = CustomColumn2('Fake2', sources=['get_absolute_url'])
class Meta:
model = ExampleModel
columns = ['name', 'fake1', 'fake2']
obj1 = ExampleModel.objects.create(name='test name 1')
queryset = ExampleModel.objects.all()
dt = DT(queryset, '/')
data = dt.get_record_data(obj1)
self.assertIn('1', data)
self.assertIn(data['1'], 'first')
self.assertIn('2', data)
self.assertIn(data['2'], 'second')
def test_get_processor_method(self):
class Dummy(object):
def fake_callback(self):
pass
view = Dummy()
# Test no callback given
dt = Datatable([], '/')
f = dt.get_processor_method(Column('Fake', sources=['fake']), i=0)
self.assertEqual(f, None)
class DT(Datatable):
def fake_callback(self):
pass
column = Column('Fake', sources=['fake'], processor='fake_callback')
# Test callback found on self
dt = DT([], '/')
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, dt.fake_callback)
# Test callback found on callback_target
dt = Datatable([], '/', callback_target=view)
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, view.fake_callback)
def test_get_processor_method_returns_direct_callable(self):
def fake_callback():
pass
column = Column('Fake', sources=[], processor=fake_callback)
# Test no callback given
dt = Datatable([], '/')
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, fake_callback)
def test_get_processor_method_finds_implied_callback(self):
class DummyNamed(object):
def get_column_fake_data(self):
pass
class DummyIndexed(object):
def get_column_0_data(self):
pass
class DummyBoth(object):
def get_column_fake_data(self):
pass
def get_column_0_data(self):
pass
column = Column('Fake', sources=[])
column.name = 'fake'
# Test implied named callback found first
view = DummyNamed()
dt = Datatable([], '/', callback_target=view)
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, view.get_column_fake_data)
# Test implied named callback found first
view = DummyIndexed()
dt = Datatable([], '/', callback_target=view)
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, view.get_column_0_data)
# Test implied named callback found first
view = DummyBoth()
dt = Datatable([], '/', callback_target=view)
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, view.get_column_fake_data)
class DTNamed(Datatable):
def get_column_fake_data(self):
pass
class DTIndexed(Datatable):
def get_column_0_data(self):
pass
class DTBoth(Datatable):
def get_column_fake_data(self):
pass
def get_column_0_data(self):
pass
# Test implied named callback found first
dt = DTNamed([], '/')
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, dt.get_column_fake_data)
# Test implied named callback found first
dt = DTIndexed([], '/')
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, dt.get_column_0_data)
# Test implied named callback found first
dt = DTBoth([], '/')
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, dt.get_column_fake_data)
def test_iter_datatable_yields_columns(self):
class CustomColumn1(Column):
pass
class CustomColumn2(Column):
pass
class DT(Datatable):
fake1 = CustomColumn1('Fake1', sources=['get_absolute_url'])
fake2 = CustomColumn2('Fake2', sources=['get_absolute_url'])
class Meta:
model = ExampleModel
columns = ['name', 'fake1', 'fake2']
dt = DT([], '/')
self.assertEqual(isgenerator(dt.__iter__()), True)
self.assertEqual(list(dt), [dt.columns['name'], dt.columns['fake1'], dt.columns['fake2']])
def test_search_term_basic(self):
obj1 = ExampleModel.objects.create(name='test name 1')
obj2 = ExampleModel.objects.create(name='test name 2')
obj3 = ExampleModel.objects.create(name='test name 12')
queryset = ExampleModel.objects.all()
class DT(Datatable):
class Meta:
model = ExampleModel
columns = ['name']
dt = DT(queryset, '/', query_config={'search[value]': 'test'})
dt.populate_records()
self.assertEqual(list(dt._records), [obj1, obj2, obj3])
dt = DT(queryset, '/', query_config={'search[value]': 'name'})
dt.populate_records()
self.assertEqual(list(dt._records), [obj1, obj2, obj3])
dt = DT(queryset, '/', query_config={'search[value]': '1'})
dt.populate_records()
self.assertEqual(list(dt._records), [obj1, obj3])
dt = DT(queryset, '/', query_config={'search[value]': '2'})
dt.populate_records()
self.assertEqual(list(dt._records), [obj2, obj3])
dt = DT(queryset, '/', query_config={'search[value]': '12'})
dt.populate_records()
self.assertEqual(list(dt._records), [obj3])
dt = DT(queryset, '/', query_config={'search[value]': '3'})
dt.populate_records()
self.assertEqual(list(dt._records), [])
def test_search_term_boolean(self):
obj1 = ExampleModel.objects.create(name='test name 1', value=True)
obj2 = ExampleModel.objects.create(name='test name 2', value=True)
obj3 = ExampleModel.objects.create(name='test name 12', value=False)
queryset = ExampleModel.objects.all()
class DT(Datatable):
senior = BooleanColumn('Senior:', 'value')
class Meta:
model = ExampleModel
columns = ['name', 'senior']
dt = DT(queryset, '/', query_config={'search[value]': 'True'})
dt.populate_records()
self.assertEqual(len(list(dt._records)), 2)
dt = DT(queryset, '/', query_config={'search[value]': 'false'})
dt.populate_records()
self.assertEqual(len(list(dt._records)), 1)
dt = DT(queryset, '/', query_config={'search[value]': 'SENIOR'})
dt.populate_records()
self.assertEqual(len(list(dt._records)), 2)
dt = DT(queryset, '/', query_config={'search[value]': 'menior'})
dt.populate_records()
self.assertEqual(len(list(dt._records)), 0)
def test_search_multiple_terms_use_AND(self):
obj1 = ExampleModel.objects.create(name='test name 1')
obj2 = ExampleModel.objects.create(name='test name 2')
obj3 = ExampleModel.objects.create(name='test name 12')
queryset = ExampleModel.objects.all()
class DT(Datatable):
class Meta:
model = ExampleModel
columns = ['name']
dt = DT(queryset, '/', query_config={'search[value]': 'test name'})
dt.populate_records()
self.assertEqual(list(dt._records), [obj1, obj2, obj3])
dt = DT(queryset, '/', query_config={'search[value]': 'test 1'})
dt.populate_records()
self.assertEqual(list(dt._records), [obj1, obj3])
dt = DT(queryset, '/', query_config={'search[value]': 'test 2'})
dt.populate_records()
self.assertEqual(list(dt._records), [obj2, obj3])
dt = DT(queryset, '/', query_config={'search[value]': 'test 12'})
dt.populate_records()
self.assertEqual(list(dt._records), [obj3])
dt = DT(queryset, '/', query_config={'search[value]': 'test 3'})
dt.populate_records()
self.assertEqual(list(dt._records), [])
def test_search_term_queries_all_columns(self):
r1 = RelatedModel.objects.create(name='test related 1 one')
r2 = RelatedModel.objects.create(name='test related 2 two')
obj1 = ExampleModel.objects.create(name='test name 1', related=r1)
obj2 = ExampleModel.objects.create(name='test name 2', related=r2)
queryset = ExampleModel.objects.all()
class DT(Datatable):
related = TextColumn('Related', ['related__name'])
class Meta:
model = ExampleModel
columns = ['name', 'related']
dt = DT(queryset, '/', query_config={'search[value]': 'test'})
dt.populate_records()
self.assertEqual(list(dt._records), [obj1, obj2])
dt = DT(queryset, '/', query_config={'search[value]': 'test name'})
dt.populate_records()
self.assertEqual(list(dt._records), [obj1, obj2])
dt = DT(queryset, '/', query_config={'search[value]': 'test 2'})
dt.populate_records()
self.assertEqual(list(dt._records), [obj2])
dt = DT(queryset, '/', query_config={'search[value]': 'related 2'})
dt.populate_records()
self.assertEqual(list(dt._records), [obj2])
dt = DT(queryset, '/', query_config={'search[value]': 'test one'})
dt.populate_records()
self.assertEqual(list(dt._records), [obj1])
dt = DT(queryset, '/', query_config={'search[value]': '2 two'})
dt.populate_records()
self.assertEqual(list(dt._records), [obj2])
dt = DT(queryset, '/', query_config={'search[value]': 'test three'})
dt.populate_records()
self.assertEqual(list(dt._records), [])
def test_search_term_queries_extra_fields(self):
r1 = RelatedModel.objects.create(name='test related 1 one')
r2 = RelatedModel.objects.create(name='test related 2 two')
obj1 = ExampleModel.objects.create(name='test name 1', related=r1)
obj2 = ExampleModel.objects.create(name='test name 2', related=r2)
queryset = ExampleModel.objects.all()
class DT(Datatable):
related = TextColumn('Related', ['related__name'])
class Meta:
model = ExampleModel
columns = ['related']
search_fields = ['name']
dt = DT(queryset, '/', query_config={'search[value]': 'test'})
dt.populate_records()
self.assertEqual(list(dt._records), [obj1, obj2])
dt = DT(queryset, '/', query_config={'search[value]': 'test name 2'})
dt.populate_records()
self.assertEqual(list(dt._records), [obj2])
class ValuesDatatableTests(DatatableViewTestCase):
def test_get_object_pk(self):
obj1 = ExampleModel.objects.create(name='test name 1')
queryset = ExampleModel.objects.all()
dt = ValuesDatatable(queryset, '/')
obj_data = queryset.values('pk')[0]
self.assertEqual(dt.get_object_pk(obj_data), obj1.pk)
| test_sort_defaults_to_meta_ordering |
stz2.rs | use std::io;
use crate::boxes::prelude::*;
def_box! {
/// 8.7.3.3 Compact Sample Size Box (ISO/IEC 14496-12:2015(E))
CompactSampleSizeBox {
// skip: 3.
field_size: u8,
count: u32,
entries: {Vec<u16>},
},
fourcc => "stz2",
version => [0],
impls => [ boxinfo, debug, fullbox ],
}
impl FromBytes for CompactSampleSizeBox {
fn from_bytes<R: ReadBytes>(stream: &mut R) -> io::Result<CompactSampleSizeBox> {
let mut reader = BoxReader::new(stream)?;
let stream = &mut reader;
stream.skip(3)?;
let field_size = u8::from_bytes(stream)?;
let count = u32::from_bytes(stream)?;
let mut entries = Vec::new();
while entries.len() < count as usize {
if field_size == 4 {
let b = u8::from_bytes(stream)?;
let hi = (b & 0xf0) >> 4;
let lo = b & 0x0f;
entries.push(hi as u16);
if entries.len() < count as usize {
entries.push(lo as u16);
}
}
if field_size == 8 {
entries.push(u8::from_bytes(stream)? as u16);
}
if field_size == 16 {
entries.push(u16::from_bytes(stream)?);
}
}
Ok(CompactSampleSizeBox {
field_size,
count,
entries,
})
}
fn min_size() -> usize { 16 }
}
impl ToBytes for CompactSampleSizeBox {
fn to_bytes<W: WriteBytes>(&self, stream: &mut W) -> io::Result<()> {
let mut writer = BoxWriter::new(stream, self)?;
let stream = &mut writer;
(self.field_size as u32).to_bytes(stream)?;
(self.entries.len() as u32).to_bytes(stream)?;
let mut i = 0;
while i < self.entries.len() {
match self.field_size {
4 => {
let mut b: u8 = ((self.entries[i] & 0xf) as u8) << 4;
i += 1;
if i < self.entries.len() {
b |= (self.entries[i] & 0xf) as u8;
i += 1;
} | },
8 => {
let b: u8 = (self.entries[i] & 0xff) as u8;
i += 1;
b.to_bytes(stream)?;
},
16 => {
let b = self.entries[i];
i += 1;
b.to_bytes(stream)?;
},
_ => break,
}
}
stream.finalize()
}
} | b.to_bytes(stream)?; |
call-function.ts | import { Node } from "babylonjs";
import { LiteGraph } from "litegraph.js";
import { GraphNode, ICodeGenerationOutput, CodeGenerationOutputType } from "../node";
export class | extends GraphNode<{ function: string; cast_as_any: boolean; }> {
/**
* Constructor.
*/
public constructor() {
super("Call Node Function");
this.addInput("", LiteGraph.EVENT as any);
this.addInput("Node *", "Node", { linkedOutput: "Node" });
this.addInput("Arg", "");
this.addProperty("function", "myFn", "string");
this.addProperty("cast_as_any", true, "boolean");
this.addWidget("text", "function", this.properties.function, (v) => this.properties.function = v);
this.addWidget("toggle", "cast_as_any", this.properties.cast_as_any, (v) => this.properties.cast_as_any = v);
this.addOutput("", LiteGraph.EVENT as any);
this.addOutput("Node", "Node");
}
/**
* Called on the node is being executed.
*/
public async execute(): Promise<void> {
const node = this.getInputData<Node>(1);
if (!node) { return; }
if (node[this.properties.function]) {
node[this.properties.function](this.getInputData(2));
this.setOutputData(1, this.getInputData(1));
return this.triggerSlot(0, null);
}
}
/**
* Generates the code of the graph.
*/
public generateCode(mesh: ICodeGenerationOutput, value?: ICodeGenerationOutput): ICodeGenerationOutput {
const code = `${this.properties.cast_as_any ? `(${mesh.code} as any)` : mesh.code}.${this.properties.function}(${value?.code ?? ""});`;
return {
type: CodeGenerationOutputType.Function,
code,
outputsCode: [
{ code: undefined },
{ code: mesh.code },
],
};
}
}
| CallNodeFunction |
polyfill.ts | if (shouldPolyfill()) {
Object.defineProperty(Intl, 'RelativeTimeFormat', {
value: RelativeTimeFormat,
writable: true,
enumerable: false,
configurable: true,
});
} | import RelativeTimeFormat from './';
import {shouldPolyfill} from './should-polyfill'; |
|
increase_position.rs | use std::{slice::Iter, str::FromStr};
use solana_program::{
account_info::{next_account_info, AccountInfo},
clock::Clock,
entrypoint::ProgramResult,
msg,
program_error::ProgramError,
program_pack::Pack,
pubkey::Pubkey,
sysvar::Sysvar,
};
use crate::{
error::PerpError,
positions_book::{memory::parse_memory, positions_book_tree::PositionsBook},
processor::{MAX_LEVERAGE, MAX_POSITION_SIZE},
state::{
instance::{parse_instance, write_instance_and_memory}, | utils::{
check_account_key, check_account_owner, check_signer, compute_fee_tier, compute_fees,
compute_liquidation_index, get_oracle_price,
},
};
use super::{FIDA_BNB, TRADE_LABEL};
pub struct Accounts<'a, 'b: 'a> {
spl_token_program: &'a AccountInfo<'b>,
clock_sysvar: &'a AccountInfo<'b>,
market: &'a AccountInfo<'b>,
market_signer: &'a AccountInfo<'b>,
market_vault: &'a AccountInfo<'b>,
bnb_bonfida: &'a AccountInfo<'b>,
instance: &'a AccountInfo<'b>,
user_account_owner: &'a AccountInfo<'b>,
user_account: &'a AccountInfo<'b>,
oracle: &'a AccountInfo<'b>,
remaining: Iter<'a, AccountInfo<'b>>,
}
impl<'a, 'b: 'a> Accounts<'a, 'b> {
pub fn parse(
program_id: &Pubkey,
accounts: &'a [AccountInfo<'b>],
) -> Result<Self, ProgramError> {
let mut accounts_iter = accounts.iter();
let spl_token_program = next_account_info(&mut accounts_iter)?;
let clock_sysvar = next_account_info(&mut accounts_iter)?;
let market = next_account_info(&mut accounts_iter)?;
let market_signer = next_account_info(&mut accounts_iter)?;
let market_vault = next_account_info(&mut accounts_iter)?;
let bnb_bonfida = next_account_info(&mut accounts_iter)?;
let instance = next_account_info(&mut accounts_iter)?;
let user_account_owner = next_account_info(&mut accounts_iter)?;
let user_account = next_account_info(&mut accounts_iter)?;
let label = next_account_info(&mut accounts_iter)?;
let oracle = next_account_info(&mut accounts_iter)?;
check_account_key(label, &Pubkey::from_str(TRADE_LABEL).unwrap()).unwrap();
check_account_key(spl_token_program, &spl_token::id()).unwrap();
check_account_key(clock_sysvar, &solana_program::sysvar::clock::ID).unwrap();
check_account_key(bnb_bonfida, &Pubkey::from_str(&FIDA_BNB).unwrap()).unwrap();
check_signer(user_account_owner).unwrap();
check_account_owner(user_account, program_id).unwrap();
check_account_owner(market, program_id).unwrap();
Ok(Self {
spl_token_program,
clock_sysvar,
market,
market_signer,
market_vault,
bnb_bonfida,
instance,
user_account_owner,
user_account,
oracle,
remaining: accounts_iter,
})
}
}
#[allow(clippy::too_many_arguments)]
pub fn process_increase_position(
program_id: &Pubkey,
accounts: &[AccountInfo<'_>],
instance_index: u8,
leverage: u64, // 32 bit FP
position_index: u16,
add_collateral: u64,
predicted_entry_price: u64, // 32 bit FP
maximum_slippage_margin: u64, // 32 bit FP
) -> ProgramResult {
let mut accounts = Accounts::parse(program_id, accounts)?;
// Parsing
let mut market_state = MarketState::unpack_from_slice(&accounts.market.data.borrow())?;
msg!(
"Market_state before: v_coin {:?} - v_pc {:?}",
market_state.v_coin_amount,
market_state.v_pc_amount
);
market_state.slippage_protection(predicted_entry_price, maximum_slippage_margin)?;
let mut user_account_header =
UserAccountState::unpack_from_slice(&accounts.user_account.data.borrow())?;
let instance_address =
get_instance_address(&accounts.market.data.borrow(), instance_index as u32)?;
if &instance_address != accounts.instance.key {
msg!("Invalid instance account or instance index provided");
return Err(ProgramError::InvalidArgument);
}
let (mut instance, mut page_infos) = parse_instance(&accounts.instance.data.borrow())?;
let memory = parse_memory(&instance, &page_infos, &mut accounts.remaining)?;
let mut book = PositionsBook::new(instance.shorts_pointer, instance.longs_pointer, memory);
let mut open_position = get_position(
&accounts.user_account.data.borrow_mut(),
&user_account_header,
position_index,
)?;
// Verifications
if leverage > MAX_LEVERAGE {
msg!(
"New leverage cannot be higher than: {:?}. Found: {:?}",
MAX_LEVERAGE >> 32,
leverage >> 32
);
return Err(PerpError::MarginTooLow.into());
}
if *accounts.user_account_owner.key != Pubkey::new_from_array(user_account_header.owner) {
msg!("The open position is not correctly configured");
return Err(ProgramError::InvalidArgument);
}
if &Pubkey::new(&user_account_header.market) != accounts.market.key {
msg!("The user account market doesn't match the given market account");
return Err(ProgramError::InvalidArgument);
}
if user_account_header.balance < add_collateral {
msg!("The user budget is not sufficient");
return Err(PerpError::NoMoreFunds.into());
}
if user_account_header.last_funding_offset != market_state.funding_history_offset {
msg!("Funding must be processed for this account.");
return Err(PerpError::PendingFunding.into());
}
if market_state.oracle_address != accounts.oracle.key.to_bytes() {
msg!("Provided oracle account is incorrect.");
return Err(ProgramError::InvalidArgument);
}
user_account_header.balance -= add_collateral;
market_state.total_collateral += add_collateral;
market_state.total_user_balances -= add_collateral;
// Calculations
book.close_position(
open_position.liquidation_index,
open_position.collateral,
open_position.v_coin_amount,
open_position.v_pc_amount,
open_position.side,
open_position.slot_number,
)?;
let add_v_pc_amount = (((add_collateral as u128) * (leverage as u128)) >> 32) as u64;
let add_v_pc_amount_signed = open_position.side.get_sign() * (add_v_pc_amount as i64);
let add_v_coin_amount = market_state.compute_add_v_coin(add_v_pc_amount_signed)?;
let new_collateral = add_collateral + open_position.collateral;
let new_v_pc_amount = add_v_pc_amount + open_position.v_pc_amount;
let new_v_coin_amount = (add_v_coin_amount.abs() as u64) + open_position.v_coin_amount;
msg!(
"Transaction info: v_coin_amount {:?}, v_pc_amount {:?}",
add_v_coin_amount.abs(),
add_v_pc_amount
);
if add_v_pc_amount >= market_state.v_pc_amount && open_position.side == PositionType::Long {
msg!("The given order size is too large!");
return Err(PerpError::AmountTooLarge.into());
}
if new_v_coin_amount >= MAX_POSITION_SIZE {
msg!(
"The given order size is too large! The maximum size is: {:?}",
MAX_POSITION_SIZE
);
return Err(PerpError::AmountTooLarge.into());
}
msg!("Add_v_pc_amount: {:?}", add_v_pc_amount_signed);
msg!("Add_v_coin_amount: {:?}", add_v_coin_amount);
msg!(
"Mark price for this transaction (FP32): {:?}, with size: {:?} and side {:?}",
((add_v_pc_amount as u128) << 32)
.checked_div(add_v_coin_amount.abs() as u128)
.unwrap_or(0),
add_v_coin_amount.abs(),
open_position.side
);
let new_liquidation_index = compute_liquidation_index(
new_collateral,
new_v_coin_amount,
new_v_pc_amount,
open_position.side,
market_state.get_k(),
);
msg!(
"Liquidation index for this position: {:?}",
new_liquidation_index
);
let current_slot = Clock::from_account_info(accounts.clock_sysvar)?.slot;
let insertion_leaf = book.open_position(
new_liquidation_index,
new_collateral,
new_v_coin_amount,
new_v_pc_amount,
open_position.side,
current_slot,
)?;
let oracle_price = get_oracle_price(
&accounts.oracle.data.borrow(),
market_state.coin_decimals,
market_state.quote_decimals,
)?;
if open_position.side.get_sign() * ((new_liquidation_index as i64) - (oracle_price as i64)) >= 0
{
msg!("This position is preliquidated");
return Err(PerpError::MarginTooLow.into());
}
let (balanced_v_pc_amount, balanced_v_coin_amount) =
market_state.balance_operation(add_v_pc_amount_signed, add_v_coin_amount, oracle_price)?;
// Update the market state
market_state.add_v_pc(balanced_v_pc_amount)?;
market_state.add_v_coin(balanced_v_coin_amount)?;
market_state.add_open_interest(
add_v_coin_amount.abs() as u64,
add_v_pc_amount,
open_position.side,
)?;
// Fees
let fee_tier = compute_fee_tier(&mut accounts.remaining)?;
let mut fees = compute_fees(fee_tier, add_v_pc_amount, leverage)?;
let referrer_account_opt = next_account_info(&mut accounts.remaining).ok();
market_state.transfer_fees(
&mut fees,
accounts.spl_token_program,
accounts.market,
accounts.market_vault,
accounts.market_signer,
accounts.bnb_bonfida,
referrer_account_opt,
)?;
market_state.apply_fees(&fees, false, false)?;
if user_account_header.balance < fees.fixed {
msg!("The user does not have the funds or the payout to pay the fees");
return Err(PerpError::NoMoreFunds.into());
}
user_account_header.balance -= fees.fixed;
// Update the open positions account
open_position.collateral = new_collateral;
open_position.liquidation_index = new_liquidation_index;
open_position.slot_number = insertion_leaf.get_slot_number(&book.memory)?;
open_position.v_coin_amount = new_v_coin_amount;
open_position.v_pc_amount = new_v_pc_amount;
msg!(
"Market_state after: v_coin {:?} - v_pc {:?}",
market_state.v_coin_amount,
market_state.v_pc_amount
);
write_position(
&mut accounts.user_account.data.borrow_mut(),
position_index,
&mut user_account_header,
&open_position,
true,
)?;
user_account_header.pack_into_slice(&mut accounts.user_account.data.borrow_mut());
instance.update(&book, &mut page_infos);
write_instance_and_memory(
&mut accounts.instance.data.borrow_mut(),
&page_infos,
&instance,
)?;
market_state.pack_into_slice(&mut accounts.market.data.borrow_mut());
Ok(())
} | market::{get_instance_address, MarketState},
user_account::{get_position, write_position},
},
state::{user_account::UserAccountState, PositionType}, |
mod.rs | use crate::arango_response::ResponseExtra;
use serde::{Deserialize, Serialize};
mod arango_mock;
#[allow(unused_imports)] // used in test
pub use arango_mock::*;
#[allow(clippy::module_name_repetitions)]
#[derive(Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct TestResponse {
#[serde(default = "Vec::new")]
pub result: Vec<serde_json::Value>,
#[serde(rename = "hasMore", default)]
pub has_more: bool,
#[serde(default)]
pub cached: bool,
#[serde(default)]
pub extra: ResponseExtra,
#[serde(default)]
pub error: bool,
#[serde(default)]
pub code: u16,
#[serde(rename = "errorMessage", skip_serializing_if = "String::is_empty", default)]
pub error_message: String,
#[serde(rename = "errorNum", skip_serializing, default)]
pub error_num: u64,
#[serde(skip_serializing_if = "String::is_empty", default)]
pub id: String,
}
impl TestResponse {
#[must_use]
pub fn new() -> Self {
TestResponse::default()
}
#[must_use]
pub fn with_results<T: Serialize>(data: &[T]) -> Self |
#[must_use]
pub fn with_code(code: u16) -> Self {
let mut res = TestResponse::default();
res.code = code;
res.extra.stats.execution_time = 0.000_365_495_681_762_695_3;
res.extra.stats.peak_memory_usage = 2019;
res
}
}
impl<T: Serialize> From<crate::arango_response::ArangoResponse<T>> for TestResponse {
fn from(ar: crate::arango_response::ArangoResponse<T>) -> Self {
let result: Vec<serde_json::Value> =
ar.result.iter().map(|t| serde_json::to_value(t).unwrap()).collect();
TestResponse {
result,
has_more: ar.has_more,
cached: ar.cached,
extra: ar.extra,
error: ar.error,
code: ar.code,
error_message: ar.error_message,
error_num: ar.error_num,
id: ar.id,
}
}
}
#[cfg(test)]
mod tests {
// Note this useful idiom: importing names from outer (for mod tests) scope.
use super::TestResponse;
use serde::Serialize;
#[derive(Serialize, Default)]
struct TestData {
name: String,
}
#[test]
fn test_results() {
let t = TestResponse::new();
let r = serde_json::to_string(&t).unwrap();
assert_eq!(
r#"{"result":[],"hasMore":false,"cached":false,"extra":{"stats":{"writesExecuted":0,"writesIgnored":0,"scannedFull":0,"scannedIndex":0,"filtered":0,"httpRequests":0,"executionTime":0.0,"fullCount":0,"peakMemoryUsage":0},"warnings":[]},"error":false,"code":0}"#,
r
);
// let t = TestResponse::with_results(vec!(TestData{ name: "John Doe".to_owned()}, TestData{ name: "Teszt Elek".to_owned()}));
// let r = serde_json::to_string(&t).unwrap();
// assert_eq!(
// r#"{"result":[{"name":"John Doe"},{"name":"Teszt Elek"}],"hasMore":false,"cached":false,"extra":{"stats":{"writesExecuted":0,"writesIgnored":0,"scannedFull":0,"scannedIndex":0,"filtered":0,"httpRequests":0,"executionTime":0.0003654956817626953,"peakMemoryUsage":2019},"warnings":[]},"error":false,"code":201}"#,
// r);
let t = TestResponse::with_code(401);
let r = serde_json::to_string(&t).unwrap();
assert_eq!(
r#"{"result":[],"hasMore":false,"cached":false,"extra":{"stats":{"writesExecuted":0,"writesIgnored":0,"scannedFull":0,"scannedIndex":0,"filtered":0,"httpRequests":0,"executionTime":0.0003654956817626953,"fullCount":0,"peakMemoryUsage":2019},"warnings":[]},"error":false,"code":401}"#,
r
);
}
}
| {
let mut res = TestResponse::default();
res.result = data.iter().map(|t| serde_json::to_value(t).unwrap()).collect();
res.code = 201;
res.extra.stats.execution_time = 0.000_365_495_681_762_695_3;
res.extra.stats.peak_memory_usage = 2109;
res
} |
BooleanExpression.ts | import { ContextKeyExpr, ContextKeyExpression } from './contextkey/contextkey';
export default class BooleanExpression {
private expression_:string;
private rules_:ContextKeyExpression = null;
constructor(expression:string) {
this.expression_ = expression;
}
private createContext(ctx: any) {
return {
getValue: (key: string) => {
return ctx[key];
},
};
}
private get rules():ContextKeyExpression {
if (!this.rules_) {
this.rules_ = ContextKeyExpr.deserialize(this.expression_);
}
return this.rules_;
}
public evaluate(context:any):boolean {
return this.rules.evaluate(this.createContext(context)); | }
} |
|
validate_test.go | package gogroup
import (
"strings"
"testing"
"github.com/stretchr/testify/assert"
)
type vopts struct {
invalid bool
verrstr string
err bool
}
func testValidate(t *testing.T, g Grouper, opts vopts, imports string) {
proc := NewProcessor(g)
text := "package main\n" + imports
errValid, err := proc.Validate("", strings.NewReader(text))
if opts.err {
assert.NotNil(t, err)
} else {
assert.Nil(t, err)
}
if opts.invalid || opts.verrstr != "" {
assert.NotNil(t, errValid)
if opts.verrstr != "" {
assert.Contains(t, errValid.Error(), opts.verrstr)
}
} else {
assert.Nil(t, errValid)
}
}
func TestValidateGroupers(t *testing.T) {
t.Parallel()
// No imports statement.
testValidate(t, grouperCombined{}, vopts{}, "")
testValidate(t, grouperGoimports{}, vopts{}, "")
testValidate(t, grouperLocalMiddle{}, vopts{}, "")
testValidate(t, grouperWeird{}, vopts{}, "")
// Just one import.
imports := `import "os"`
testValidate(t, grouperCombined{}, vopts{}, imports)
testValidate(t, grouperGoimports{}, vopts{}, imports)
testValidate(t, grouperLocalMiddle{}, vopts{}, imports)
testValidate(t, grouperWeird{}, vopts{}, imports)
// Multiple imports in same group, ordered ok.
imports = `import (
"os"
"strings"
"testing"
)`
testValidate(t, grouperCombined{}, vopts{}, imports)
testValidate(t, grouperGoimports{}, vopts{}, imports)
testValidate(t, grouperLocalMiddle{}, vopts{}, imports)
testValidate(t, grouperWeird{}, vopts{}, imports)
// Multiple imports in same group, ordered poorly.
imports = `import (
"strings"
"os"
)`
testValidate(t, grouperCombined{}, vopts{verrstr: errstrStatementOrder}, imports)
testValidate(t, grouperGoimports{}, vopts{verrstr: errstrStatementOrder}, imports)
testValidate(t, grouperLocalMiddle{}, vopts{verrstr: errstrStatementOrder}, imports)
testValidate(t, grouperWeird{}, vopts{verrstr: errstrStatementOrder}, imports)
// Imports grouped together.
imports = `import (
"github.com/Sirupsen/logrus"
"os"
)`
testValidate(t, grouperCombined{}, vopts{}, imports)
testValidate(t, grouperGoimports{}, vopts{invalid: true}, imports)
testValidate(t, grouperLocalMiddle{}, vopts{invalid: true}, imports)
testValidate(t, grouperWeird{}, vopts{invalid: true}, imports)
// Std/other separated.
imports = `import (
"os"
"github.com/Sirupsen/logrus"
)`
testValidate(t, grouperCombined{}, vopts{invalid: true}, imports)
testValidate(t, grouperGoimports{}, vopts{}, imports)
testValidate(t, grouperLocalMiddle{}, vopts{}, imports)
testValidate(t, grouperWeird{}, vopts{}, imports)
// Std/other separated but backwards.
imports = `import (
"github.com/Sirupsen/logrus"
"os"
)`
testValidate(t, grouperCombined{}, vopts{invalid: true}, imports)
testValidate(t, grouperGoimports{}, vopts{invalid: true}, imports)
testValidate(t, grouperLocalMiddle{}, vopts{invalid: true}, imports)
testValidate(t, grouperWeird{}, vopts{invalid: true}, imports)
// Std/other/local.
imports = `import (
"os"
"github.com/Sirupsen/logrus"
"local/foo"
)`
testValidate(t, grouperCombined{}, vopts{invalid: true}, imports)
testValidate(t, grouperGoimports{}, vopts{}, imports)
testValidate(t, grouperLocalMiddle{}, vopts{invalid: true}, imports)
testValidate(t, grouperWeird{}, vopts{invalid: true}, imports)
// Std/other/appengine/local.
imports = `import (
"os"
"testing" | "appengine"
"local/foo"
)`
testValidate(t, grouperCombined{}, vopts{invalid: true}, imports)
testValidate(t, grouperGoimports{}, vopts{}, imports)
testValidate(t, grouperLocalMiddle{}, vopts{invalid: true}, imports)
testValidate(t, grouperWeird{}, vopts{invalid: true}, imports)
// Local in the middle.
imports = `import (
"os"
"strings"
"local/bar"
"local/foo"
"github.com/Sirupsen/logrus"
"gopkg.in/redis.v3"
)`
testValidate(t, grouperCombined{}, vopts{invalid: true}, imports)
testValidate(t, grouperGoimports{}, vopts{invalid: true}, imports)
testValidate(t, grouperLocalMiddle{}, vopts{}, imports)
testValidate(t, grouperWeird{}, vopts{invalid: true}, imports)
// Weird ordering, just to prove we can.
imports = `import (
"strings"
"go/parser"
"gopkg.in/redis.v3"
"local/pkg"
"github.com/Sirupsen/logrus"
"local/foo/bar"
)`
testValidate(t, grouperCombined{}, vopts{invalid: true}, imports)
testValidate(t, grouperGoimports{}, vopts{invalid: true}, imports)
testValidate(t, grouperLocalMiddle{}, vopts{invalid: true}, imports)
testValidate(t, grouperWeird{}, vopts{}, imports)
}
func TestValidateEdgeCases(t *testing.T) {
t.Parallel()
// A single import, but with brackets is ok.
imports := `import (
"os"
)`
testValidate(t, grouperGoimports{}, vopts{}, imports)
// Comments are allowed.
imports = `import (
// Comment on a line
"os" // End-of-line comment
/* Multi
line
comment */
"github.com/urfave/cli"
// Multi
// line,
// the other way.
"golang.org/x/net/context"
)`
testValidate(t, grouperGoimports{}, vopts{}, imports)
// Extra newlines are not allowed.
imports = `import (
"os"
"golang.org/x/net/context"
)`
testValidate(t, grouperGoimports{}, vopts{verrstr: errstrGroupExtraLine}, imports)
// Parse errors yield errors.
imports = `import (
"os
)`
testValidate(t, grouperGoimports{}, vopts{err: true}, imports)
// Special imports are allowed, sorted by actual import path.
imports = `import (
b "os"
a "strings"
_ "testing"
. "golang.org/x/net/context"
)`
testValidate(t, grouperGoimports{}, vopts{}, imports)
}
func TestValidateErrors(t *testing.T) {
// TODO
} |
"github.com/Sirupsen/logrus"
|
canvasGraphService.ts | import {
ICanvasGraphServiceSettings,
IPerfMinMax,
IGraphDrawableArea,
IPerfMousePanningPosition,
IPerfIndexBounds,
IPerfTooltip,
IPerfTextMeasureCache,
IPerfLayoutSize,
IPerfTicker,
TimestampUnit,
ITooltipPreprocessedInformation,
IPerfTooltipHoverPosition,
IVisibleRangeChangedObservableProps,
} from "./graphSupportingTypes";
import { IPerfDatasets, IPerfMetadata } from "core/Misc/interfaces/iPerfViewer";
import { Scalar } from "core/Maths/math.scalar";
import { PerformanceViewerCollector } from "core/Misc/PerformanceViewer/performanceViewerCollector";
import { Observable } from "core/Misc/observable";
const defaultColor = "#000";
const axisColor = "#c0c4c8";
const futureBoxColor = "#dfe9ed";
const dividerColor = "#0a3066";
const playheadColor = "#b9dbef";
const positionIndicatorColor = "#4d5960";
const tooltipBackgroundColor = "#566268";
const tooltipForegroundColor = "#fbfbfb";
const topOfGraphY = 0;
const defaultAlpha = 1;
const tooltipBackgroundAlpha = 0.8;
const backgroundLineAlpha = 0.2;
const maxDistanceForHover = 10;
const tooltipHorizontalPadding = 10;
const spaceBetweenTextAndBox = 5;
const tooltipPaddingFromBottom = 20;
// height of indicator triangle
const triangleHeight = 10;
// width of indicator triangle
const triangleWidth = 20;
// padding to indicate how far below the axis line the triangle should be.
const trianglePaddingFromAxisLine = 3;
const tickerHorizontalPadding = 10;
// pixels to pad the top and bottom of data so that it doesn't get cut off by the margins.
const dataPadding = 2;
const playheadSize = 8;
const dividerSize = 2;
const axisLineLength = 10;
const axisPadding = 10;
// Currently the scale factor is a constant but when we add panning this may become formula based.
const scaleFactor = 0.8;
// This controls the scale factor at which we stop drawing the playhead. Below this value there tends to be flickering of the playhead as data comes in.
const stopDrawingPlayheadThreshold = 0.95;
// Threshold for the ratio at which we go from panning mode to live mode.
const returnToLiveThreshold = 0.998;
// Font to use on the addons such as tooltips and tickers!
const graphAddonFont = "12px Arial";
// A string containing the alphabet, used in line height calculation for the font.
const alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
// Arbitrary maximum used to make some GC optimizations.
const maximumDatasetsAllowed = 64;
const msInSecond = 1000;
const msInMinute = msInSecond * 60;
const msInHour = msInMinute * 60;
// time in ms to wait between tooltip draws inside the mouse move.
const tooltipDebounceTime = 32;
// time in ms to wait between draws
const drawThrottleTime = 15;
// What distance percentage in the x axis between two points makes us break the line and draw a "no data" box instead
const maxXDistancePercBetweenLinePoints = 0.1;
// Color used to draw the rectangle that indicates no collection of data
const noDataRectangleColor = "#aaaaaa";
const smoothingFactor = 0.2; // factor to smooth the graph with
const rangeMargin = 0.1; // extra margin to expand the min/max range on the graph
/**
* This function will debounce calls to functions.
*
* @param callback callback to call.
* @param time time to wait between calls in ms.
*/
function debounce(callback: (...args: any[]) => void, time: number) {
let timerId: any;
return function (...args: any[]) {
clearTimeout(timerId);
timerId = setTimeout(() => callback(...args), time);
};
}
/**
* This function will throttle calls to functions.
*
* @param callback callback to call.
* @param time time to wait between calls in ms.
*/
function | (callback: (...args: any[]) => void, time: number) {
let lastCalledTime: number = 0;
return function (...args: any[]) {
const now = Date.now();
if (now - lastCalledTime < time) {
return;
}
lastCalledTime = now;
callback(...args);
};
}
/*
* This class acts as the main API for graphing given a Here is where you will find methods to let the service know new data needs to be drawn,
* let it know something has been resized, etc!
*/
export class CanvasGraphService {
private _ctx: CanvasRenderingContext2D | null;
private _width: number;
private _height: number;
private _sizeOfWindow: number = 300;
private _ticks: number[];
private _panPosition: IPerfMousePanningPosition | null;
private _position: number | null;
private _datasetBounds: IPerfIndexBounds;
private _globalTimeMinMax: IPerfMinMax;
private _hoverPosition: IPerfTooltipHoverPosition | null;
private _drawableArea: IGraphDrawableArea;
private _axisHeight: number;
private _tooltipItems: IPerfTooltip[];
private _tooltipTextCache: IPerfTextMeasureCache;
private _tickerTextCache: IPerfTextMeasureCache;
private _tickerItems: IPerfTicker[];
private _preprocessedTooltipInfo: ITooltipPreprocessedInformation;
private _numberOfTickers: number;
private _onVisibleRangeChangedObservable?: Observable<IVisibleRangeChangedObservableProps>;
private readonly _addonFontLineHeight: number;
private readonly _defaultLineHeight: number;
public readonly datasets: IPerfDatasets;
public metadata: Map<string, IPerfMetadata>;
/**
* Creates an instance of CanvasGraphService.
*
* @param canvas a pointer to the canvas dom element we would like to write to.
* @param settings settings for our service.
*/
constructor(canvas: HTMLCanvasElement, settings: ICanvasGraphServiceSettings) {
this._ctx = canvas.getContext && canvas.getContext("2d");
this._width = canvas.width;
this._height = canvas.height;
this._ticks = [];
this._panPosition = null;
this._hoverPosition = null;
this._position = null;
this._datasetBounds = { start: 0, end: 0 };
this._globalTimeMinMax = { min: Infinity, max: 0 };
this._drawableArea = { top: 0, left: 0, right: 0, bottom: 0 };
this._tooltipTextCache = { text: "", width: 0 };
this._tickerTextCache = { text: "", width: 0 };
this._tooltipItems = [];
this._tickerItems = [];
this._preprocessedTooltipInfo = { focusedId: "", longestText: "", numberOfTooltipItems: 0, xForActualTimestamp: 0 };
this._numberOfTickers = 0;
this._onVisibleRangeChangedObservable = settings.onVisibleRangeChangedObservable;
for (let i = 0; i < maximumDatasetsAllowed; i++) {
this._tooltipItems.push({ text: "", color: "" });
this._tickerItems.push({ text: "", id: "", max: 0, min: 0 });
}
if (!this._ctx) {
throw Error("No canvas context accessible");
}
const defaultMetrics = this._ctx.measureText(alphabet);
this._defaultLineHeight = defaultMetrics.actualBoundingBoxAscent + defaultMetrics.actualBoundingBoxDescent;
this._axisHeight = axisLineLength + axisPadding + this._defaultLineHeight + axisPadding;
this._ctx.save();
this._ctx.font = graphAddonFont;
const fontMetrics = this._ctx.measureText(alphabet);
this._addonFontLineHeight = fontMetrics.actualBoundingBoxAscent + fontMetrics.actualBoundingBoxDescent;
this._ctx.restore();
this.datasets = settings.datasets;
this.metadata = new Map<string, IPerfMetadata>();
this._attachEventListeners(canvas);
}
/**
* This method lets the service know it should get ready to update what it is displaying.
*/
public update = throttle(() => this._draw(), drawThrottleTime);
/**
* Update the canvas graph service with the new height and width of the canvas.
* @param size The new size of the canvas.
*/
public resize(size: IPerfLayoutSize) {
const { _ctx: ctx } = this;
const { width, height } = size;
if (!ctx || !ctx.canvas) {
return;
}
this._width = width;
this._height = height;
ctx.canvas.width = width;
ctx.canvas.height = height;
this.update();
}
/**
* Force resets the position in the data, effectively returning to the most current data.
*/
public resetDataPosition() {
this._position = null;
}
private _prevPointById: Map<string, [number, number]> = new Map<string, [number, number]>();
private _prevValueById: Map<string, number> = new Map<string, number>();
/**
* This method draws the data and sets up the appropriate scales.
*/
private _draw() {
const { _ctx: ctx } = this;
if (!ctx) {
return;
}
const numSlices = this._getNumberOfSlices();
if (numSlices === 0) {
return;
}
// First we clear the canvas so we can draw our data!
this.clear();
// Get global min max of time axis (across all datasets).
this._globalTimeMinMax.min = Infinity;
this._globalTimeMinMax.max = 0;
// First we must get the end positions of our view port.
const pos = this._position ?? numSlices - 1;
let start = pos - Math.ceil(this._sizeOfWindow * scaleFactor);
let startOverflow = 0;
// account for overflow from start.
if (start < 0) {
startOverflow = 0 - start;
start = 0;
}
let end = Math.ceil(pos + this._sizeOfWindow * (1 - scaleFactor) + startOverflow);
// account for overflow from end.
if (end > numSlices) {
const endOverflow = end - numSlices;
end = numSlices;
start = Math.max(start - endOverflow, 0);
}
// update the bounds
this._datasetBounds.start = start;
this._datasetBounds.end = end;
// next we must find the min and max timestamp in bounds. (Timestamps are sorted)
this._globalTimeMinMax.min = this.datasets.data.at(this.datasets.startingIndices.at(this._datasetBounds.start));
this._globalTimeMinMax.max = this.datasets.data.at(this.datasets.startingIndices.at(this._datasetBounds.end - 1));
// set the buffer region maximum by rescaling the max timestamp in bounds.
const bufferMaximum = Math.ceil((this._globalTimeMinMax.max - this._globalTimeMinMax.min) / scaleFactor + this._globalTimeMinMax.min);
// we then need to update the end position based on the maximum for the buffer region
// binary search to get closest point to the buffer maximum.
this._datasetBounds.end = this._getClosestPointToTimestamp(bufferMaximum) + 1;
// keep track of largest timestamp value in view!
this._globalTimeMinMax.max = Math.max(this.datasets.data.at(this.datasets.startingIndices.at(this._datasetBounds.end - 1)), this._globalTimeMinMax.max);
const updatedScaleFactor = Scalar.Clamp((this._globalTimeMinMax.max - this._globalTimeMinMax.min) / (bufferMaximum - this._globalTimeMinMax.min), scaleFactor, 1);
// we will now set the global maximum to the maximum of the buffer.
this._globalTimeMinMax.max = bufferMaximum;
this._drawableArea.top = 0;
this._drawableArea.left = 0;
this._drawableArea.bottom = this._height;
this._drawableArea.right = this._width;
this._drawTickers(this._drawableArea, this._datasetBounds);
this._drawTimeAxis(this._globalTimeMinMax, this._drawableArea);
this._drawPlayheadRegion(this._drawableArea, updatedScaleFactor);
this._drawableArea.top += dataPadding;
this._drawableArea.bottom -= dataPadding;
// pre-process tooltip info so we can use it in determining opacity of lines.
this._preprocessTooltip(this._hoverPosition, this._drawableArea);
const { left, right, bottom, top } = this._drawableArea;
// process, and then draw our points
this.datasets.ids.forEach((id, idOffset) => {
let valueMinMax: IPerfMinMax | undefined;
let prevPoint = this._prevPointById.get(id);
let prevValue = this._prevValueById.get(id);
let ticker = false;
for (let i = 0; i < this._numberOfTickers; i++) {
if (this._tickerItems[i].id === id) {
ticker = true;
}
}
if (!ticker) {
return;
}
ctx.beginPath();
ctx.strokeStyle = this.metadata.get(id)?.color ?? defaultColor;
// if we are focused on a line and not in live mode handle the opacities appropriately.
if (this._preprocessedTooltipInfo.focusedId === id) {
ctx.globalAlpha = defaultAlpha;
} else if (this._preprocessedTooltipInfo.focusedId !== "") {
ctx.globalAlpha = backgroundLineAlpha;
}
const values = new Array(this._datasetBounds.end - this._datasetBounds.start);
for (let pointIndex = this._datasetBounds.start; pointIndex < this._datasetBounds.end; pointIndex++) {
const numPoints = this.datasets.data.at(this.datasets.startingIndices.at(pointIndex) + PerformanceViewerCollector.NumberOfPointsOffset);
if (idOffset >= numPoints) {
continue;
}
const valueIndex = this.datasets.startingIndices.at(pointIndex) + PerformanceViewerCollector.SliceDataOffset + idOffset;
const value = this.datasets.data.at(valueIndex);
if (prevValue === undefined) {
prevValue = value;
this._prevValueById.set(id, prevValue);
}
// perform smoothing
const smoothedValue = smoothingFactor * value + (1 - smoothingFactor) * prevValue;
values[pointIndex - this._datasetBounds.start] = smoothedValue;
if (!valueMinMax) {
valueMinMax = {
min: smoothedValue,
max: smoothedValue,
};
}
this._prevValueById.set(id, smoothedValue);
valueMinMax.min = Math.min(valueMinMax.min, smoothedValue);
valueMinMax.max = Math.max(valueMinMax.max, smoothedValue);
}
const delta = valueMinMax!.max - valueMinMax!.min;
valueMinMax!.min -= rangeMargin * delta;
valueMinMax!.max += rangeMargin * delta;
for (let pointIndex = this._datasetBounds.start; pointIndex < this._datasetBounds.end; pointIndex++) {
const timestamp = this.datasets.data.at(this.datasets.startingIndices.at(pointIndex));
const smoothedValue = values[pointIndex - this._datasetBounds.start];
const drawableTime = this._getPixelForNumber(timestamp, this._globalTimeMinMax, left, right - left, false);
const drawableValue = this._getPixelForNumber(smoothedValue, valueMinMax!, top, bottom - top, true);
if (prevPoint === undefined) {
prevPoint = [drawableTime, drawableValue];
this._prevPointById.set(id, prevPoint);
}
const xDifference = drawableTime - prevPoint[0];
const skipLine = xDifference > maxXDistancePercBetweenLinePoints * (right - left);
if (skipLine) {
ctx.fillStyle = noDataRectangleColor;
ctx.fillRect(prevPoint[0], top, xDifference, bottom - top);
} else {
if (prevPoint[0] < drawableTime) {
ctx.moveTo(prevPoint[0], prevPoint[1]);
ctx.lineTo(drawableTime, drawableValue);
}
}
prevPoint[0] = drawableTime;
prevPoint[1] = drawableValue;
}
ctx.stroke();
});
ctx.globalAlpha = defaultAlpha;
// then draw the tooltip.
this._drawTooltip(this._hoverPosition, this._drawableArea);
}
private _drawTickers(drawableArea: IGraphDrawableArea, bounds: IPerfIndexBounds) {
const { _ctx: ctx } = this;
if (!ctx) {
return;
}
// create the ticker objects for each of the non hidden items.
let longestText: string = "";
this._numberOfTickers = 0;
const valueMap = new Map<string, IPerfMinMax>();
this.datasets.ids.forEach((id, idOffset) => {
if (this.metadata.get(id)?.hidden) {
return;
}
const valueMinMax = this._getMinMax(bounds, idOffset);
const latestValue = this.datasets.data.at(this.datasets.startingIndices.at(bounds.end - 1) + PerformanceViewerCollector.SliceDataOffset + idOffset);
const text = `${id}: ${latestValue.toFixed(2)} (max: ${valueMinMax.max.toFixed(2)}, min: ${valueMinMax.min.toFixed(2)})`;
valueMap.set(id, {
min: valueMinMax.min,
max: valueMinMax.max,
current: latestValue,
});
if (text.length > longestText.length) {
longestText = text;
}
this._tickerItems[this._numberOfTickers].id = id;
this._tickerItems[this._numberOfTickers].max = valueMinMax.max;
this._tickerItems[this._numberOfTickers].min = valueMinMax.min;
this._tickerItems[this._numberOfTickers].text = text;
this._numberOfTickers++;
});
this._onVisibleRangeChangedObservable?.notifyObservers({ valueMap });
ctx.save();
ctx.font = graphAddonFont;
ctx.textBaseline = "middle";
ctx.textAlign = "left";
let width: number;
// if the lengths are the same the estimate should be good enough given the padding.
if (this._tickerTextCache.text.length === longestText.length) {
width = this._tickerTextCache.width;
} else {
width = ctx.measureText(longestText).width + 2 * tickerHorizontalPadding;
this._tickerTextCache.text = longestText;
this._tickerTextCache.width = width;
}
ctx.restore();
}
/**
* Returns the index of the closest time for the datasets.
* Uses a modified binary search to get value.
*
* @param targetTime the time we want to get close to.
* @returns index of the item with the closest time to the targetTime
*/
private _getClosestPointToTimestamp(targetTime: number): number {
let low = 0;
let high = this._getNumberOfSlices() - 1;
let closestIndex = 0;
while (low <= high) {
const middle = Math.trunc((low + high) / 2);
const middleTimestamp = this.datasets.data.at(this.datasets.startingIndices.at(middle));
if (Math.abs(middleTimestamp - targetTime) < Math.abs(this.datasets.data.at(this.datasets.startingIndices.at(closestIndex)) - targetTime)) {
closestIndex = middle;
}
if (middleTimestamp < targetTime) {
low = middle + 1;
} else if (middleTimestamp > targetTime) {
high = middle - 1;
} else {
break;
}
}
return closestIndex;
}
/**
* This is a convenience method to get the number of collected slices.
* @returns the total number of collected slices.
*/
private _getNumberOfSlices() {
return this.datasets.startingIndices.itemLength;
}
/**
* Draws the time axis, adjusts the drawable area for the graph.
*
* @param timeMinMax the minimum and maximum for the time axis.
* @param drawableArea the current allocated drawable area.
*/
private _drawTimeAxis(timeMinMax: IPerfMinMax, drawableArea: IGraphDrawableArea) {
const { _ctx: ctx } = this;
if (!ctx) {
return;
}
const spaceAvailable = drawableArea.right - drawableArea.left;
this._generateTicks(timeMinMax, spaceAvailable);
// remove the height of the axis from the available drawable area.
drawableArea.bottom -= this._axisHeight;
// draw axis box.
ctx.save();
ctx.fillStyle = axisColor;
ctx.fillRect(drawableArea.left, drawableArea.bottom, spaceAvailable, this._axisHeight);
// draw time axis line
ctx.beginPath();
ctx.strokeStyle = defaultColor;
ctx.moveTo(drawableArea.left, drawableArea.bottom);
ctx.lineTo(drawableArea.right, drawableArea.bottom);
// draw ticks and text.
ctx.fillStyle = defaultColor;
ctx.textAlign = "center";
ctx.textBaseline = "middle";
const timestampUnit: TimestampUnit = this._getTimestampUnit(this._ticks[this._ticks.length - 1]);
this._ticks.forEach((tick: number) => {
let position = this._getPixelForNumber(tick, timeMinMax, drawableArea.left, spaceAvailable, false);
if (position > spaceAvailable) {
position = spaceAvailable;
}
ctx.moveTo(position, drawableArea.bottom);
ctx.lineTo(position, drawableArea.bottom + 10);
ctx.fillText(this._parseTimestamp(tick, timestampUnit), position, drawableArea.bottom + 20);
});
ctx.stroke();
ctx.restore();
}
/**
* Given a timestamp (should be the maximum timestamp in view), this function returns the maximum unit the timestamp contains.
* This information can be used for formatting purposes.
* @param timestamp the maximum timestamp to find the maximum timestamp unit for.
* @returns The maximum unit the timestamp has.
*/
private _getTimestampUnit(timestamp: number): TimestampUnit {
if (timestamp / msInHour > 1) {
return TimestampUnit.Hours;
} else if (timestamp / msInMinute > 1) {
return TimestampUnit.Minutes;
} else if (timestamp / msInSecond > 1) {
return TimestampUnit.Seconds;
} else {
return TimestampUnit.Milliseconds;
}
}
/**
* Given a timestamp and the interval unit, this function will parse the timestamp to the appropriate format.
* @param timestamp The timestamp to parse
* @param intervalUnit The maximum unit of the maximum timestamp in an interval.
* @returns a string representing the parsed timestamp.
*/
private _parseTimestamp(timestamp: number, intervalUnit: TimestampUnit): string {
let parsedTimestamp = "";
if (intervalUnit >= TimestampUnit.Hours) {
const numHours = Math.floor(timestamp / msInHour);
timestamp -= numHours * msInHour;
parsedTimestamp += `${numHours.toString().padStart(intervalUnit > TimestampUnit.Hours ? 2 : 1, "0")}:`;
}
if (intervalUnit >= TimestampUnit.Minutes) {
const numMinutes = Math.floor(timestamp / msInMinute);
timestamp -= numMinutes * msInMinute;
parsedTimestamp += `${numMinutes.toString().padStart(intervalUnit > TimestampUnit.Minutes ? 2 : 1, "0")}:`;
}
const numSeconds = Math.floor(timestamp / msInSecond);
timestamp -= numSeconds * msInSecond;
parsedTimestamp += numSeconds.toString().padStart(intervalUnit > TimestampUnit.Seconds ? 2 : 1, "0");
if (timestamp > 0) {
if (parsedTimestamp.length > 0) {
parsedTimestamp += ".";
}
parsedTimestamp += Math.round(timestamp).toString().padStart(3, "0");
}
return parsedTimestamp;
}
/**
* Generates a list of ticks given the min and max of the axis, and the space available in the axis.
*
* @param minMax the minimum and maximum values of the axis
* @param spaceAvailable the total amount of space we have allocated to our axis
*/
private _generateTicks(minMax: IPerfMinMax, spaceAvailable: number) {
const { min, max } = minMax;
const minTickSpacing = 40;
this._ticks.length = 0;
const maxTickCount = Math.ceil(spaceAvailable / minTickSpacing);
const range = this._niceNumber(max - min, false);
const spacing = this._niceNumber(range / (maxTickCount - 1), true);
const niceMin = Math.floor(min / spacing) * spacing;
const niceMax = Math.floor(max / spacing) * spacing;
for (let i = niceMin; i <= niceMax + 0.5 * spacing; i += spacing) {
this._ticks.push(i);
}
}
/**
* Nice number algorithm based on psueudo code defined in "Graphics Gems" by Andrew S. Glassner.
* This will find a "nice" number approximately equal to num.
*
* @param num The number we want to get close to.
* @param shouldRound if true we will round the number, otherwise we will get the ceiling.
* @returns a "nice" number approximately equal to num.
*/
private _niceNumber(num: number, shouldRound: boolean) {
const exp = Math.floor(Math.log10(num));
const fraction = num / Math.pow(10, exp);
let niceFraction: number;
if (shouldRound) {
if (fraction < 1.5) {
niceFraction = 1;
} else if (fraction < 3) {
niceFraction = 2;
} else if (fraction < 7) {
niceFraction = 5;
} else {
niceFraction = 10;
}
} else {
if (fraction <= 1) {
niceFraction = 1;
} else if (fraction <= 2) {
niceFraction = 2;
} else if (fraction <= 5) {
niceFraction = 5;
} else {
niceFraction = 10;
}
}
return niceFraction * Math.pow(10, exp);
}
/**
* Gets the min and max as a single object from an array of numbers.
*
* @param items the array of numbers to get the min and max for.
* @param bounds
* @param offset
* @returns the min and max of the array.
*/
private _getMinMax(bounds: IPerfIndexBounds, offset: number): IPerfMinMax {
let min = Infinity,
max = 0;
for (let i = bounds.start; i < bounds.end; i++) {
const numPoints = this.datasets.data.at(this.datasets.startingIndices.at(i) + PerformanceViewerCollector.NumberOfPointsOffset);
if (offset >= numPoints) {
continue;
}
const itemIndex = this.datasets.startingIndices.at(i) + PerformanceViewerCollector.SliceDataOffset + offset;
const item = this.datasets.data.at(itemIndex);
if (item < min) {
min = item;
}
if (item > max) {
max = item;
}
}
return {
min,
max,
};
}
/**
* Converts a single number to a pixel coordinate in a single axis by normalizing the data to a [0, 1] scale using the minimum and maximum values.
*
* @param num the number we want to get the pixel coordinate for
* @param minMax the min and max of the dataset in the axis we want the pixel coordinate for.
* @param startingPixel the starting pixel coordinate (this means it takes account for any offset).
* @param spaceAvailable the total space available in this axis.
* @param shouldFlipValue if we should use a [1, 0] scale instead of a [0, 1] scale.
* @returns the pixel coordinate of the value in a single axis.
*/
private _getPixelForNumber(num: number, minMax: IPerfMinMax, startingPixel: number, spaceAvailable: number, shouldFlipValue: boolean) {
const { min, max } = minMax;
// Perform a min-max normalization to rescale the value onto a [0, 1] scale given the min and max of the dataset.
let normalizedValue = Math.abs(max - min) > 0.001 ? (num - min) / (max - min) : 0.5;
// if we should make this a [1, 0] range instead (higher numbers = smaller pixel value)
if (shouldFlipValue) {
normalizedValue = 1 - normalizedValue;
}
return startingPixel + normalizedValue * spaceAvailable;
}
/**
* Add in any necessary event listeners.
*
* @param canvas The canvas we want to attach listeners to.
*/
private _attachEventListeners(canvas: HTMLCanvasElement) {
canvas.addEventListener("wheel", this._handleZoom);
canvas.addEventListener("mousemove", this._handleDataHover);
canvas.addEventListener("mousedown", this._handlePanStart);
canvas.addEventListener("mouseleave", this._handleStopHover);
// The user may stop panning outside of the canvas size so we should add the event listener to the document.
canvas.ownerDocument.addEventListener("mouseup", this._handlePanStop);
}
/**
* We remove all event listeners we added.
*
* @param canvas The canvas we want to remove listeners from.
*/
private _removeEventListeners(canvas: HTMLCanvasElement) {
canvas.removeEventListener("wheel", this._handleZoom);
canvas.removeEventListener("mousemove", this._handleDataHover);
canvas.removeEventListener("mousedown", this._handlePanStart);
canvas.removeEventListener("mouseleave", this._handleStopHover);
canvas.ownerDocument.removeEventListener("mouseup", this._handlePanStop);
}
/**
* Handles what to do when we are hovering over the canvas and not panning.
*
* @param event A reference to the event to be handled.
*/
private _handleDataHover = (event: MouseEvent) => {
if (this._panPosition) {
// we don't want to do anything if we are in the middle of panning
return;
}
this._hoverPosition = { xPos: event.clientX, yPos: event.clientY };
// process and draw the tooltip.
this._debouncedTooltip(this._hoverPosition, this._drawableArea);
};
/**
* Debounced processing and drawing of tooltip.
*/
private _debouncedTooltip = debounce((pos: IPerfTooltipHoverPosition | null, drawableArea: IGraphDrawableArea) => {
this._preprocessTooltip(pos, drawableArea);
this._drawTooltip(pos, drawableArea);
}, tooltipDebounceTime);
/**
* Handles what to do when we stop hovering over the canvas.
*/
private _handleStopHover = () => {
this._hoverPosition = null;
};
/**
* Given a line defined by P1: (x1, y1) and P2: (x2, y2) get the distance of P0 (x0, y0) from the line.
* https://en.wikipedia.org/wiki/Distance_from_a_point_to_a_line#Line_defined_by_two_points
* @param x1 x position of point P1
* @param y1 y position of point P1
* @param x2 x position of point P2
* @param y2 y position of point P2
* @param x0 x position of point P0
* @param y0 y position of point P0
* @returns distance of P0 from the line defined by P1 and P2
*/
private _getDistanceFromLine(x1: number, y1: number, x2: number, y2: number, x0: number, y0: number): number {
// if P1 and P2 are the same we just get the distance between P1 and P0
if (x1 === x2 && y1 === y2) {
return Math.sqrt(Math.pow(x1 - x0, 2) + Math.pow(y1 - y0, 2));
}
// next we want to handle the case where our point is beyond the y position of our line
let topX = 0;
let topY = 0;
let bottomX = 0;
let bottomY = 0;
if (y1 >= y2) {
topX = x1;
topY = y1;
bottomX = x2;
bottomY = y2;
} else {
topX = x2;
topY = y2;
bottomX = x1;
bottomY = y1;
}
if (y0 < bottomY) {
return Math.sqrt(Math.pow(bottomX - x0, 2) + Math.pow(bottomY - y0, 2));
}
if (y0 > topY) {
return Math.sqrt(Math.pow(topX - x0, 2) + Math.pow(topY - y0, 2));
}
// the general case!
const numerator = Math.abs((x2 - x1) * (y1 - y0) - (x1 - x0) * (y2 - y1));
const denominator = Math.sqrt(Math.pow(x2 - x1, 2) + Math.pow(y2 - y1, 2));
return numerator / denominator;
}
/**
* This method does preprocessing calculations for the tooltip.
* @param pos the position of our mouse.
* @param drawableArea the remaining drawable area.
*/
private _preprocessTooltip(pos: IPerfTooltipHoverPosition | null, drawableArea: IGraphDrawableArea) {
const { _ctx: ctx } = this;
if (pos === null || !ctx || !ctx.canvas || this._getNumberOfSlices() === 0) {
return;
}
const { left, top } = ctx.canvas.getBoundingClientRect();
const adjustedYPos = pos.yPos - top;
let adjustedXPos = pos.xPos - left;
if (adjustedXPos > drawableArea.right) {
adjustedXPos = drawableArea.right;
}
// convert the mouse x position in pixels to a timestamp.
const inferredTimestamp = this._getNumberFromPixel(adjustedXPos, this._globalTimeMinMax, drawableArea.left, drawableArea.right, false);
let longestText: string = "";
let numberOfTooltipItems = 0;
// get the closest timestamps to the target timestamp, and store the appropriate meta object.
const closestIndex = this._getClosestPointToTimestamp(inferredTimestamp);
let actualTimestamp: number = 0;
let closestLineId: string = "";
let closestLineValueMinMax: IPerfMinMax = { min: 0, max: 0 };
let closestLineDistance: number = Number.POSITIVE_INFINITY;
this.datasets.ids.forEach((id, idOffset) => {
if (this.metadata.get(id)?.hidden) {
return;
}
const numPoints = this.datasets.data.at(this.datasets.startingIndices.at(closestIndex) + PerformanceViewerCollector.NumberOfPointsOffset);
if (idOffset >= numPoints) {
return;
}
const valueAtClosestPointIndex = this.datasets.startingIndices.at(closestIndex) + PerformanceViewerCollector.SliceDataOffset + idOffset;
const valueAtClosestPoint = this.datasets.data.at(valueAtClosestPointIndex);
let valueMinMax: IPerfMinMax | undefined;
// we would have already calculated the min and max while getting the tickers, so use those, and get first one.
for (let i = 0; i < this._numberOfTickers; i++) {
if (this._tickerItems[i].id === id) {
valueMinMax = this._tickerItems[i];
}
}
if (!valueMinMax) {
return;
}
actualTimestamp = this.datasets.data.at(this.datasets.startingIndices.at(closestIndex));
const valueAtClosestPointYPos = this._getPixelForNumber(valueAtClosestPoint, valueMinMax, drawableArea.top, drawableArea.bottom - drawableArea.top, true);
const xForActualTimestamp = this._getPixelForNumber(actualTimestamp, this._globalTimeMinMax, drawableArea.left, drawableArea.right - drawableArea.left, false);
const text = `${id}: ${valueAtClosestPoint.toFixed(2)}`;
if (text.length > longestText.length) {
longestText = text;
}
this._tooltipItems[numberOfTooltipItems].text = text;
this._tooltipItems[numberOfTooltipItems].color = this.metadata.get(id)?.color ?? defaultColor;
numberOfTooltipItems++;
// don't process rest if we aren't panned.
if (!this._position) {
return;
}
// initially distance between closest data point and mouse point.
let distance: number = this._getDistanceFromLine(
xForActualTimestamp,
valueAtClosestPointYPos,
xForActualTimestamp,
valueAtClosestPointYPos,
pos.xPos - left,
adjustedYPos
);
// get the shortest distance between the point and the line segment infront, and line segment behind, store the shorter distance (if shorter than distance between closest data point and mouse).
if (
closestIndex + 1 < this.datasets.data.itemLength &&
this.datasets.data.at(this.datasets.startingIndices.at(closestIndex + 1) + PerformanceViewerCollector.NumberOfPointsOffset) > idOffset
) {
const secondPointTimestamp = this.datasets.data.at(this.datasets.startingIndices.at(closestIndex + 1));
const secondPointX = this._getPixelForNumber(secondPointTimestamp, this._globalTimeMinMax, drawableArea.left, drawableArea.right - drawableArea.left, false);
const secondPointValue = this.datasets.data.at(this.datasets.startingIndices.at(closestIndex + 1) + PerformanceViewerCollector.SliceDataOffset + idOffset);
const secondPointY = this._getPixelForNumber(secondPointValue, valueMinMax, drawableArea.top, drawableArea.bottom - drawableArea.top, true);
distance = Math.min(this._getDistanceFromLine(xForActualTimestamp, valueAtClosestPointYPos, secondPointX, secondPointY, pos.xPos - left, adjustedYPos), distance);
}
if (closestIndex - 1 >= 0 && this.datasets.data.at(this.datasets.startingIndices.at(closestIndex + 1) + PerformanceViewerCollector.NumberOfPointsOffset) > idOffset) {
const secondPointTimestamp = this.datasets.data.at(this.datasets.startingIndices.at(closestIndex - 1));
const secondPointX = this._getPixelForNumber(secondPointTimestamp, this._globalTimeMinMax, drawableArea.left, drawableArea.right - drawableArea.left, false);
const secondPointValue = this.datasets.data.at(this.datasets.startingIndices.at(closestIndex - 1) + PerformanceViewerCollector.SliceDataOffset + idOffset);
const secondPointY = this._getPixelForNumber(secondPointValue, valueMinMax, drawableArea.top, drawableArea.bottom - drawableArea.top, true);
distance = Math.min(this._getDistanceFromLine(xForActualTimestamp, valueAtClosestPointYPos, secondPointX, secondPointY, pos.xPos - left, adjustedYPos), distance);
}
if (distance < closestLineDistance) {
closestLineId = id;
closestLineDistance = distance;
closestLineValueMinMax = valueMinMax;
}
});
const xForActualTimestamp = this._getPixelForNumber(actualTimestamp, this._globalTimeMinMax, drawableArea.left, drawableArea.right - drawableArea.left, false);
this._preprocessedTooltipInfo.xForActualTimestamp = xForActualTimestamp;
// check if hover is within a certain distance, if so it is our only item in our tooltip.
if (closestLineDistance <= maxDistanceForHover && this._position) {
this._preprocessedTooltipInfo.focusedId = closestLineId;
const inferredValue = this._getNumberFromPixel(adjustedYPos, closestLineValueMinMax, drawableArea.top, drawableArea.bottom, true);
const closestLineText = `${closestLineId}: ${inferredValue.toFixed(2)}`;
this._preprocessedTooltipInfo.longestText = closestLineText;
this._preprocessedTooltipInfo.numberOfTooltipItems = 1;
this._tooltipItems[0].text = closestLineText;
this._tooltipItems[0].color = this.metadata.get(closestLineId)?.color ?? defaultColor;
} else {
this._preprocessedTooltipInfo.focusedId = "";
this._preprocessedTooltipInfo.longestText = longestText;
this._preprocessedTooltipInfo.numberOfTooltipItems = numberOfTooltipItems;
}
}
/**
* Draws the tooltip given the area it is allowed to draw in and the current pixel position.
*
* @param pos the position of the mouse cursor in pixels (x, y).
* @param drawableArea the available area we can draw in.
*/
private _drawTooltip(pos: IPerfTooltipHoverPosition | null, drawableArea: IGraphDrawableArea) {
const { _ctx: ctx } = this;
if (pos === null || !ctx || !ctx.canvas || this._getNumberOfSlices() === 0) {
return;
}
const { left, top } = ctx.canvas.getBoundingClientRect();
const { numberOfTooltipItems, xForActualTimestamp, longestText } = this._preprocessedTooltipInfo;
ctx.save();
// draw pointer triangle
ctx.fillStyle = positionIndicatorColor;
const yTriangle = drawableArea.bottom + trianglePaddingFromAxisLine;
ctx.beginPath();
ctx.moveTo(xForActualTimestamp, yTriangle);
ctx.lineTo(xForActualTimestamp + triangleWidth / 2, yTriangle + triangleHeight);
ctx.lineTo(xForActualTimestamp - triangleWidth / 2, yTriangle + triangleHeight);
ctx.closePath();
ctx.fill();
ctx.strokeStyle = positionIndicatorColor;
ctx.beginPath();
// draw vertical or horizontal line depending on if focused on a point on the line.
if (this._preprocessedTooltipInfo.focusedId === "") {
ctx.moveTo(xForActualTimestamp, drawableArea.bottom);
ctx.lineTo(xForActualTimestamp, topOfGraphY);
} else {
const lineY = pos.yPos - top;
ctx.moveTo(drawableArea.left, lineY);
ctx.lineTo(drawableArea.right, lineY);
}
ctx.stroke();
// draw the actual tooltip
ctx.font = graphAddonFont;
ctx.textBaseline = "middle";
ctx.textAlign = "left";
const boxLength = this._addonFontLineHeight;
const textHeight = this._addonFontLineHeight + Math.floor(tooltipHorizontalPadding / 2);
// initialize width with cached value or measure width of longest text and update cache.
let width: number;
if (longestText === this._tooltipTextCache.text) {
width = this._tooltipTextCache.width;
} else {
width = ctx.measureText(longestText).width + boxLength + 2 * tooltipHorizontalPadding + spaceBetweenTextAndBox;
this._tooltipTextCache.text = longestText;
this._tooltipTextCache.width = width;
}
const tooltipHeight = textHeight * (numberOfTooltipItems + 1);
let x = pos.xPos - left;
let y = drawableArea.bottom - tooltipPaddingFromBottom - tooltipHeight;
// We want the tool tip to always be inside the canvas so we adjust which way it is drawn.
if (x + width > this._width) {
x -= width;
}
ctx.globalAlpha = tooltipBackgroundAlpha;
ctx.fillStyle = tooltipBackgroundColor;
ctx.fillRect(x, y, width, tooltipHeight);
ctx.globalAlpha = defaultAlpha;
x += tooltipHorizontalPadding;
y += textHeight;
for (let i = 0; i < numberOfTooltipItems; i++) {
const tooltipItem = this._tooltipItems[i];
ctx.fillStyle = tooltipItem.color;
ctx.fillRect(x, y - Math.floor(boxLength / 2), boxLength, boxLength);
ctx.fillStyle = tooltipForegroundColor;
ctx.fillText(tooltipItem.text, x + boxLength + spaceBetweenTextAndBox, y);
y += textHeight;
}
ctx.restore();
}
/**
* Gets the number from a pixel position given the minimum and maximum value in range, and the starting pixel and the ending pixel.
*
* @param pixel current pixel position we want to get the number for.
* @param minMax the minimum and maximum number in the range.
* @param startingPixel position of the starting pixel in range.
* @param endingPixel position of ending pixel in range.
* @param shouldFlipValue if we should use a [1, 0] scale instead of a [0, 1] scale.
* @param shouldFlip
* @returns number corresponding to pixel position
*/
private _getNumberFromPixel(pixel: number, minMax: IPerfMinMax, startingPixel: number, endingPixel: number, shouldFlip: boolean): number {
// normalize pixel to range [0, 1].
let normalizedPixelPosition = (pixel - startingPixel) / (endingPixel - startingPixel);
// we should use a [1, 0] scale instead.
if (shouldFlip) {
normalizedPixelPosition = 1 - normalizedPixelPosition;
}
return minMax.min + normalizedPixelPosition * (minMax.max - minMax.min);
}
/**
* The handler for when we want to zoom in and out of the graph.
*
* @param event a mouse wheel event.
*/
private _handleZoom = (event: WheelEvent) => {
event.preventDefault();
if (!event.deltaY) {
return;
}
const amount = ((event.deltaY * -0.01) | 0) * 100;
const minZoom = 60;
// The max zoom is the number of slices.
const maxZoom = this._getNumberOfSlices();
if (this._shouldBecomeRealtime()) {
this._position = null;
}
// Bind the zoom between [minZoom, maxZoom]
this._sizeOfWindow = Scalar.Clamp(this._sizeOfWindow - amount, minZoom, maxZoom);
};
/**
* Initializes the panning object and attaches appropriate listener.
*
* @param event the mouse event containing positional information.
*/
private _handlePanStart = (event: MouseEvent) => {
const { _ctx: ctx } = this;
if (!ctx || !ctx.canvas) {
return;
}
const canvas = ctx.canvas;
this._panPosition = {
xPos: event.clientX,
delta: 0,
};
this._hoverPosition = null;
canvas.addEventListener("mousemove", this._handlePan);
};
/**
* While panning this event will keep track of the delta and update the "positions".
*
* @param event The mouse event that contains positional information.
*/
private _handlePan = (event: MouseEvent) => {
if (!this._panPosition || this._getNumberOfSlices() === 0) {
return;
}
const pixelDelta = this._panPosition.delta + event.clientX - this._panPosition.xPos;
const pixelsPerItem = (this._drawableArea.right - this._drawableArea.left) / this._sizeOfWindow;
const itemsDelta = (pixelDelta / pixelsPerItem) | 0;
const pos = this._position ?? this._getNumberOfSlices() - 1;
// update our position without allowing the user to pan more than they need to (approximation)
this._position = Scalar.Clamp(
pos - itemsDelta,
Math.floor(this._sizeOfWindow * scaleFactor),
this._getNumberOfSlices() - Math.floor(this._sizeOfWindow * (1 - scaleFactor))
);
if (itemsDelta === 0) {
this._panPosition.delta += pixelDelta;
} else {
this._panPosition.delta = 0;
}
this._panPosition.xPos = event.clientX;
this._prevPointById.clear();
this._prevValueById.clear();
};
/**
* Clears the panning object and removes the appropriate listener.
*
* @param event the mouse event containing positional information.
*/
private _handlePanStop = () => {
const { _ctx: ctx } = this;
if (!ctx || !ctx.canvas) {
return;
}
// check if we should return to realtime.
if (this._shouldBecomeRealtime()) {
this._position = null;
}
const canvas = ctx.canvas;
canvas.removeEventListener("mousemove", this._handlePan);
this._panPosition = null;
};
/**
* Method which returns true if the data should become realtime, false otherwise.
*
* @returns if the data should become realtime or not.
*/
private _shouldBecomeRealtime(): boolean {
if (this._getNumberOfSlices() === 0) {
return false;
}
// we need to compare our current slice to the latest slice to see if we should return to realtime mode.
const pos = this._position;
const latestSlicePos = this._getNumberOfSlices() - 1;
if (pos === null) {
return false;
}
// account for overflow on the left side only as it will be the one determining if we have sufficiently caught up to the realtime data.
const overflow = Math.max(0 - (pos - Math.ceil(this._sizeOfWindow * scaleFactor)), 0);
const rightmostPos = Math.min(overflow + pos + Math.ceil(this._sizeOfWindow * (1 - scaleFactor)), latestSlicePos);
return (
this.datasets.data.at(this.datasets.startingIndices.at(rightmostPos)) / this.datasets.data.at(this.datasets.startingIndices.at(latestSlicePos)) > returnToLiveThreshold
);
}
/**
* Will generate a playhead with a futurebox that takes up (1-scalefactor)*100% of the canvas.
*
* @param drawableArea The remaining drawable area.
* @param scaleFactor The Percentage between 0.0 and 1.0 of the canvas the data gets drawn on.
*/
private _drawPlayheadRegion(drawableArea: IGraphDrawableArea, scaleFactor: number) {
const { _ctx: ctx } = this;
if (!ctx || scaleFactor >= stopDrawingPlayheadThreshold) {
return;
}
const dividerXPos = Math.ceil(drawableArea.right * scaleFactor);
const playheadPos = dividerXPos - playheadSize;
const futureBoxPos = dividerXPos + dividerSize;
const rectangleHeight = drawableArea.bottom - drawableArea.top - 1;
ctx.save();
ctx.fillStyle = futureBoxColor;
ctx.fillRect(futureBoxPos, drawableArea.top, drawableArea.right - futureBoxPos, rectangleHeight);
ctx.fillStyle = dividerColor;
ctx.fillRect(dividerXPos, drawableArea.top, dividerSize, rectangleHeight);
ctx.fillStyle = playheadColor;
ctx.fillRect(playheadPos, drawableArea.top, playheadSize, rectangleHeight);
ctx.restore();
}
/**
* Method to do cleanup when the object is done being used.
*
*/
public destroy() {
if (!this._ctx || !this._ctx.canvas) {
return;
}
this._removeEventListeners(this._ctx.canvas);
this._ctx = null;
}
/**
* This method clears the canvas
*/
public clear() {
const { _ctx: ctx, _width, _height } = this;
// If we do not have a context we can't really do much here!
if (!ctx) {
return;
}
// save the transformation matrix, clear the canvas then restore.
ctx.save();
ctx.resetTransform();
ctx.clearRect(0, 0, _width, _height);
ctx.restore();
}
}
| throttle |
q380_test_ins_del_rand_const.py | from src.base.test_cases import TestCases
from src.utility.constants import INSERT, REMOVE, GET_RANDOM
class InsDelRandConstTestCases(TestCases):
def __init__(self):
| super(InsDelRandConstTestCases, self).__init__()
self.__add_test_case__('Test Insert 1', (INSERT, 1), True)
self.__add_test_case__('Test Remove 2', (REMOVE, 2), False)
self.__add_test_case__('Test Insert 2', (INSERT, 2), True)
self.__add_test_case__('Test Random', (GET_RANDOM, [1,2]), True)
self.__add_test_case__('Test Remove 1', (REMOVE, 1), True)
self.__add_test_case__('Test Insert 2', (INSERT, 2), False)
self.__add_test_case__('Test Random', (GET_RANDOM, [2]), True)
self.__add_test_case__('Test Remove 0', (REMOVE, 0), False)
self.__add_test_case__('Test Remove 0', (REMOVE, 0), False)
self.__add_test_case__('Test Insert 0', (INSERT, 0), True)
self.__add_test_case__('Test Random', (GET_RANDOM, [0,2]), True)
self.__add_test_case__('Test Remove 0', (REMOVE, 0), True)
self.__add_test_case__('Test Insert 0', (INSERT, 0), True) |
|
index.tsx | import React from 'react';
import ReactDOM from 'react-dom';
import 'bootstrap/dist/css/bootstrap.min.css';
import './index.css';
import App from './App';
import reportWebVitals from './reportWebVitals';
import { ConnectedRouter } from 'connected-react-router';
import { Provider } from 'react-redux';
import { history, store } from './store';
ReactDOM.render(
<React.StrictMode>
<Provider store={store}>
<ConnectedRouter history={history}>
<App />
</ConnectedRouter>
</Provider> | </React.StrictMode>,
document.getElementById('root')
);
// If you want to start measuring performance in your app, pass a function
// to log results (for example: reportWebVitals(console.log))
// or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals
reportWebVitals(); | |
node.go | package httpmux
import "net/http"
type node struct {
len int
handlerFuncs []http.HandlerFunc
handlers []http.Handler
}
func newNode(len int) *node {
return &node{
len: len,
handlerFuncs: make([]http.HandlerFunc, len),
handlers: make([]http.Handler, len),
}
}
func (n *node) add(pos int, h interface{}) {
if h, ok := h.(http.HandlerFunc); ok {
n.handlerFuncs[pos] = h
return
}
if h, ok := h.(http.Handler); ok |
}
| {
n.handlers[pos] = h
} |
Optimizers.py | import os
import sys
root_path = os.path.abspath("../")
if root_path not in sys.path:
sys.path.append(root_path)
import tensorflow as tf
class Optimizer:
def __init__(self, lr=1e-3):
self._lr = lr
self._opt = None
@property
def name(self):
return str(self)
def minimize(self, x, *args, **kwargs):
return self._opt.minimize(x, *args, **kwargs)
def __str__(self):
return self.__class__.__name__
def | (self):
return str(self)
class MBGD(Optimizer):
def __init__(self, lr=1e-3):
Optimizer.__init__(self, lr)
self._opt = tf.train.GradientDescentOptimizer(self._lr)
class Momentum(Optimizer):
def __init__(self, lr=1e-3, momentum=0.8):
Optimizer.__init__(self, lr)
self._opt = tf.train.MomentumOptimizer(self._lr, momentum)
class NAG(Optimizer):
def __init__(self, lr=1e-3, momentum=0.8):
Optimizer.__init__(self, lr)
self._opt = tf.train.MomentumOptimizer(self._lr, momentum, use_nesterov=True)
class AdaDelta(Optimizer):
def __init__(self, lr=1e-3, rho=0.95, eps=1e-8):
Optimizer.__init__(self, lr)
self._opt = tf.train.AdadeltaOptimizer(self._lr, rho, eps)
class AdaGrad(Optimizer):
def __init__(self, lr=1e-3, init=0.1):
Optimizer.__init__(self, lr)
self._opt = tf.train.AdagradOptimizer(self._lr, init)
class Adam(Optimizer):
def __init__(self, lr=1e-3, beta1=0.9, beta2=0.999, eps=1e-8):
Optimizer.__init__(self, lr)
self._opt = tf.train.AdamOptimizer(self._lr, beta1, beta2, eps)
class RMSProp(Optimizer):
def __init__(self, lr=1e-3, decay=0.9, momentum=0.0, eps=1e-10):
Optimizer.__init__(self, lr)
self._opt = tf.train.RMSPropOptimizer(self._lr, decay, momentum, eps)
# Factory
class OptFactory:
available_optimizers = {
"MBGD": MBGD, "Momentum": Momentum, "NAG": NAG,
"AdaDelta": AdaDelta, "AdaGrad": AdaGrad,
"Adam": Adam, "RMSProp": RMSProp
}
def get_optimizer_by_name(self, name, lr, *args, **kwargs):
try:
optimizer = self.available_optimizers[name](lr, *args, **kwargs)
return optimizer
except KeyError:
raise NotImplementedError("Undefined Optimizer '{}' found".format(name))
| __repr__ |
penalty.py | # -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
NbrOfNodes = 35
keygnra = ' TIME: GANDRA STEP: 80.000 FRAME: 1.000'
keystent = ' TIME: STENT STEP: 1.000 FRAME: 1.000'
keygnrb = ' TIME: GANDRB STEP: 100.000 FRAME: 1.000'
# File for gain parameter 01
#--------------------------------------------------------------------------
#--------------------------------------------------------------------------
file_g01 = open('surgery_p7.rsn', 'r')
gain01 = file_g01.readlines()
g01 = pd.Series(gain01)
g01 = g01.replace(r'\n','', regex=True)
g01 = g01.replace(r'\r\n','', regex=True)
g01 = g01.replace(r'\r','', regex=True)
index_Time_g01 = g01[g01.str.contains('TIME', case=False, regex=False)]
index_TimeValues_g01 = index_Time_g01.index.values
#--------------------------------------------------------------------------
G01 = {}
for idx in index_Time_g01.index.values:
index_start = idx + 1
index_end = index_start + NbrOfNodes
tmp_df = g01[index_start:index_end].str.strip()
tmp_df = tmp_df.str.split(' ',expand=True)
np.array(tmp_df.values, dtype=float)
G01[g01[idx]]=np.array(tmp_df.values, dtype=float)
#every mesh along time
Data_g01 = np.array([], dtype=np.int64)
Data_g01.shape = (-1, 7)
for key in sorted(G01.keys()):
Data_g01 = np.append(Data_g01,[G01[key][0,:]], axis=0)
#mesh for this particular key GNRA
Data_g01_gnra = np.array([], dtype=np.int64)
Data_g01_gnra.shape = (-1, 7)
for node in range(NbrOfNodes):
Data_g01_gnra = np.append(Data_g01_gnra,[G01[keygnra][node,:]], axis=0)
#mesh for this particular key STENT
Data_g01_stent = np.array([], dtype=np.int64)
Data_g01_stent.shape = (-1, 7)
for node in range(NbrOfNodes):
Data_g01_stent = np.append(Data_g01_stent,[G01[keystent][node,:]], axis=0)
#mesh for this particular key GNRB
Data_g01_gnrb = np.array([], dtype=np.int64)
Data_g01_gnrb.shape = (-1, 7)
for node in range(NbrOfNodes):
Data_g01_gnrb = np.append(Data_g01_gnrb,[G01[keygnrb][node,:]], axis=0)
Data_g01=Data_g01[np.argsort(Data_g01[:,0])]
#--------------------------------------------------------------------------
# File for gain parameter 02
#--------------------------------------------------------------------------
file_g02 = open('surgery_ref.rsn', 'r')
gain02 = file_g02.readlines()
g02 = pd.Series(gain02)
g02 = g02.replace(r'\n','', regex=True)
g02 = g02.replace(r'\r\n','', regex=True)
g02 = g02.replace(r'\r','', regex=True)
index_Time_g02 = g02[g02.str.contains('TIME', case=False, regex=False)]
index_TimeValues_g02 = index_Time_g02.index.values
#--------------------------------------------------------------------------
G02 = {}
for idx in index_Time_g02.index.values:
index_start = idx + 1
index_end = index_start + NbrOfNodes
tmp_df = g02[index_start:index_end].str.strip()
tmp_df = tmp_df.str.split(' ',expand=True)
np.array(tmp_df.values, dtype=float)
G02[g02[idx]]=np.array(tmp_df.values, dtype=float)
#every mesh along time
Data_g02 = np.array([], dtype=np.int64)
Data_g02.shape = (-1, 7)
for key in sorted(G02.keys()):
Data_g02 = np.append(Data_g02,[G02[key][0,:]], axis=0)
#mesh for this particular key GNRA
Data_g02_gnra = np.array([], dtype=np.int64)
Data_g02_gnra.shape = (-1, 7)
for node in range(NbrOfNodes):
Data_g02_gnra = np.append(Data_g02_gnra,[G02[keygnra][node,:]], axis=0)
#mesh for this particular key STENT
Data_g02_stent = np.array([], dtype=np.int64)
Data_g02_stent.shape = (-1, 7)
for node in range(NbrOfNodes):
Data_g02_stent = np.append(Data_g02_stent,[G02[keystent][node,:]], axis=0)
#mesh for this particular key GNRB
Data_g02_gnrb = np.array([], dtype=np.int64)
Data_g02_gnrb.shape = (-1, 7)
for node in range(NbrOfNodes):
Data_g02_gnrb = np.append(Data_g02_gnrb,[G02[keygnrb][node,:]], axis=0)
Data_g02=Data_g02[np.argsort(Data_g02[:,0])]
#--------------------------------------------------------------------------
# File for gain parameter 03
#--------------------------------------------------------------------------
file_g03 = open('surgery_p9.rsn', 'r')
gain03 = file_g03.readlines()
g03 = pd.Series(gain03)
g03 = g03.replace(r'\n','', regex=True)
g03 = g03.replace(r'\r\n','', regex=True)
g03 = g03.replace(r'\r','', regex=True)
index_Time_g03 = g03[g03.str.contains('TIME', case=False, regex=False)]
index_TimeValues_g03 = index_Time_g03.index.values
#--------------------------------------------------------------------------
G03 = {}
| tmp_df = g03[index_start:index_end].str.strip()
tmp_df = tmp_df.str.split(' ',expand=True)
np.array(tmp_df.values, dtype=float)
G03[g03[idx]]=np.array(tmp_df.values, dtype=float)
#every mesh along time
Data_g03 = np.array([], dtype=np.int64)
Data_g03.shape = (-1, 7)
for key in sorted(G03.keys()):
Data_g03 = np.append(Data_g03,[G03[key][0,:]], axis=0)
#mesh for this particular key GNRA
Data_g03_gnra = np.array([], dtype=np.int64)
Data_g03_gnra.shape = (-1, 7)
for node in range(NbrOfNodes):
Data_g03_gnra = np.append(Data_g03_gnra,[G03[keygnra][node,:]], axis=0)
#mesh for this particular key STENT
Data_g03_stent = np.array([], dtype=np.int64)
Data_g03_stent.shape = (-1, 7)
for node in range(NbrOfNodes):
Data_g03_stent = np.append(Data_g03_stent,[G03[keystent][node,:]], axis=0)
#mesh for this particular key GNRB
Data_g03_gnrb = np.array([], dtype=np.int64)
Data_g03_gnrb.shape = (-1, 7)
for node in range(NbrOfNodes):
Data_g03_gnrb = np.append(Data_g03_gnrb,[G03[keygnrb][node,:]], axis=0)
Data_g03=Data_g03[np.argsort(Data_g03[:,0])]
#--------------------------------------------------------------------------
fig = plt.figure()
plt.rcParams.update({'font.size': 5})
plt.rc('text', usetex=False)
plt.subplot(4,3,1)
plt.plot(Data_g01[:,0],Data_g01[:,4]/1000.0,'b',label='Penalty=1*10^7',linewidth=1.0,markersize=10)
plt.plot(Data_g02[:,0],Data_g02[:,4]/1000.0,'r',label='Penalty=1*10^5',linewidth=1.0,markersize=10)
plt.plot(Data_g03[:,0],Data_g03[:,4]/1000.0,'g',label='Penalty=1*10^9',linewidth=1.0,markersize=10)
plt.text(0.5,0.05,r'Time [months]', {'color': 'k', 'fontsize': 6},
ha='center',va='center',clip_on=False,transform=plt.gca().transAxes)
plt.text(0.05, 0.5, r'Axial Stress [kPa]',{'color': 'k', 'fontsize': 6,},
ha='left',va='center',rotation=90,clip_on=False,transform=plt.gca().transAxes)
plt.text(0.95, 0.95, r'a',{'color': 'k', 'fontsize': 6,
'bbox': dict(boxstyle="round", fc="w", ec="k", pad=0.2)},
ha='right',va='top',transform=plt.gca().transAxes)
plt.axis([0,180,0,150])
plt.subplot(4,3,2)
plt.plot(Data_g01[:,0],Data_g01[:,5]/1000.0,'b',label='Penalty=1*10^7',linewidth=1.0,markersize=10)
plt.plot(Data_g02[:,0],Data_g02[:,5]/1000.0,'r',label='Penalty=1*10^5',linewidth=1.0,markersize=10)
plt.plot(Data_g03[:,0],Data_g03[:,5]/1000.0,'g',label='Penalty=1*10^9',linewidth=1.0,markersize=10)
plt.text(0.5,0.05,r'Time [months]', {'color': 'k', 'fontsize': 6},
ha='center',va='center',clip_on=False,transform=plt.gca().transAxes)
plt.text(0.05, 0.5, r'Circumferential Stress [kPa]',{'color': 'k', 'fontsize': 6,},
ha='left',va='center',rotation=90,clip_on=False,transform=plt.gca().transAxes)
plt.text(0.95, 0.95, r'b',{'color': 'k', 'fontsize': 6,
'bbox': dict(boxstyle="round", fc="w", ec="k", pad=0.2)},
ha='right',va='top',transform=plt.gca().transAxes)
plt.legend(loc='center right')
plt.axis([0,180,0,350])
plt.subplot(4,3,3)
plt.plot(Data_g01[:,0],Data_g01[:,3]*1000.0,'b',label='Penalty=1*10^7',linewidth=1.0,markersize=10)
plt.plot(Data_g02[:,0],Data_g02[:,3]*1000.0,'r',label='Penalty=1*10^5',linewidth=1.0,markersize=10)
plt.plot(Data_g03[:,0],Data_g03[:,3]*1000.0,'g',label='Penalty=1*10^9',linewidth=1.0,markersize=10)
plt.text(0.5,0.05,r'Time [months]', {'color': 'k', 'fontsize': 6},
ha='center',va='center',clip_on=False,transform=plt.gca().transAxes)
plt.text(0.05, 0.5, r'Radius [mm]',{'color': 'k', 'fontsize': 6,},
ha='left',va='center',rotation=90,clip_on=False,transform=plt.gca().transAxes)
plt.text(0.95, 0.95, r'c',{'color': 'k', 'fontsize': 6,
'bbox': dict(boxstyle="round", fc="w", ec="k", pad=0.2)},
ha='right',va='top',transform=plt.gca().transAxes)
plt.axis([0,180,10,13])
plt.subplot(4,3,4)
plt.plot(Data_g01_gnra[:,2]*1000.0,Data_g01_gnra[:,4]/1000.0,'b',label='Penalty=1*10^7',linewidth=1.0,markersize=10)
plt.plot(Data_g02_gnra[:,2]*1000.0,Data_g02_gnra[:,4]/1000.0,'r',label='Penalty=1*10^5',linewidth=1.0,markersize=10)
plt.plot(Data_g03_gnra[:,2]*1000.0,Data_g03_gnra[:,4]/1000.0,'g',label='Penalty=1*10^9',linewidth=1.0,markersize=10)
plt.text(0.5,0.05,r'Axial position [mm]', {'color': 'k', 'fontsize': 6},
ha='center',va='center',clip_on=False,transform=plt.gca().transAxes)
plt.text(0.05, 0.5, r'Axial Stress [kPa]',{'color': 'k', 'fontsize': 6,},
ha='left',va='center',rotation=90,clip_on=False,transform=plt.gca().transAxes)
plt.text(0.95, 0.95, r'd',{'color': 'k', 'fontsize': 6,
'bbox': dict(boxstyle="round", fc="w", ec="k", pad=0.2)},
ha='right',va='top',transform=plt.gca().transAxes)
plt.axis([0,100,0,150])
plt.subplot(4,3,5)
plt.plot(Data_g01_gnra[:,2]*1000.0,Data_g01_gnra[:,5]/1000.0,'b',label='Penalty=1*10^7',linewidth=1.0,markersize=10)
plt.plot(Data_g02_gnra[:,2]*1000.0,Data_g02_gnra[:,5]/1000.0,'r',label='Penalty=1*10^5',linewidth=1.0,markersize=10)
plt.plot(Data_g03_gnra[:,2]*1000.0,Data_g03_gnra[:,5]/1000.0,'g',label='Penalty=1*10^9',linewidth=1.0,markersize=10)
plt.text(0.5,0.05,r'Axial position [mm]', {'color': 'k', 'fontsize': 6},
ha='center',va='center',clip_on=False,transform=plt.gca().transAxes)
plt.text(0.05, 0.5, r'Circumferential Stress [kPa]',{'color': 'k', 'fontsize': 6,},
ha='left',va='center',rotation=90,clip_on=False,transform=plt.gca().transAxes)
plt.text(0.95, 0.95, r'e',{'color': 'k', 'fontsize': 6,
'bbox': dict(boxstyle="round", fc="w", ec="k", pad=0.2)},
ha='right',va='top',transform=plt.gca().transAxes)
plt.axis([0,100,0,350])
plt.subplot(4,3,6)
plt.plot(Data_g01_gnra[:,2]*1000.0,Data_g01_gnra[:,3]*1000.0,'b',label='Penalty=1*10^7',linewidth=1.0,markersize=10)
plt.plot(Data_g02_gnra[:,2]*1000.0,Data_g02_gnra[:,3]*1000.0,'r',label='Penalty=1*10^5',linewidth=1.0,markersize=10)
plt.plot(Data_g03_gnra[:,2]*1000.0,Data_g03_gnra[:,3]*1000.0,'g',label='Penalty=1*10^9',linewidth=1.0,markersize=10)
plt.text(0.5,0.05,r'Axial position [mm]', {'color': 'k', 'fontsize': 6},
ha='center',va='center',clip_on=False,transform=plt.gca().transAxes)
plt.text(0.05, 0.5, r'Radius [mm]',{'color': 'k', 'fontsize': 6,},
ha='left',va='center',rotation=90,clip_on=False,transform=plt.gca().transAxes)
plt.text(0.95, 0.95, r'f',{'color': 'k', 'fontsize': 6,
'bbox': dict(boxstyle="round", fc="w", ec="k", pad=0.2)},
ha='right',va='top',transform=plt.gca().transAxes)
plt.axis([0,100,10,13])
plt.subplot(4,3,7)
plt.plot(Data_g01_stent[:,2]*1000.0,Data_g01_stent[:,4]/1000.0,'b',label='Penalty=1*10^7',linewidth=1.0,markersize=10)
plt.plot(Data_g02_stent[:,2]*1000.0,Data_g02_stent[:,4]/1000.0,'r',label='Penalty=1*10^5',linewidth=1.0,markersize=10)
plt.plot(Data_g03_stent[:,2]*1000.0,Data_g03_stent[:,4]/1000.0,'g',label='Penalty=1*10^9',linewidth=1.0,markersize=10)
plt.text(0.5,0.05,r'Axial position [mm]', {'color': 'k', 'fontsize': 6},
ha='center',va='center',clip_on=False,transform=plt.gca().transAxes)
plt.text(0.05, 0.5, r'Axial Stress [kPa]',{'color': 'k', 'fontsize': 6,},
ha='left',va='center',rotation=90,clip_on=False,transform=plt.gca().transAxes)
plt.text(0.95, 0.95, r'g',{'color': 'k', 'fontsize': 6,
'bbox': dict(boxstyle="round", fc="w", ec="k", pad=0.2)},
ha='right',va='top',transform=plt.gca().transAxes)
plt.axis([0,100,0,150])
plt.subplot(4,3,8)
plt.plot(Data_g01_stent[:,2]*1000.0,Data_g01_stent[:,5]/1000.0,'b',label='Penalty=1*10^7',linewidth=1.0,markersize=10)
plt.plot(Data_g02_stent[:,2]*1000.0,Data_g02_stent[:,5]/1000.0,'r',label='Penalty=1*10^5',linewidth=1.0,markersize=10)
plt.plot(Data_g03_stent[:,2]*1000.0,Data_g03_stent[:,5]/1000.0,'g',label='Penalty=1*10^9',linewidth=1.0,markersize=10)
plt.text(0.5,0.05,r'Axial position [mm]', {'color': 'k', 'fontsize': 6},
ha='center',va='center',clip_on=False,transform=plt.gca().transAxes)
plt.text(0.05, 0.5, r'Circumferential Stress [kPa]',{'color': 'k', 'fontsize': 6,},
ha='left',va='center',rotation=90,clip_on=False,transform=plt.gca().transAxes)
plt.text(0.95, 0.95, r'h',{'color': 'k', 'fontsize': 6,
'bbox': dict(boxstyle="round", fc="w", ec="k", pad=0.2)},
ha='right',va='top',transform=plt.gca().transAxes)
plt.axis([0,100,0,350])
plt.subplot(4,3,9)
plt.plot(Data_g01_stent[:,2]*1000.0,Data_g01_stent[:,3]*1000.0,'b',label='Penalty=1*10^7',linewidth=1.0,markersize=10)
plt.plot(Data_g02_stent[:,2]*1000.0,Data_g02_stent[:,3]*1000.0,'r',label='Penalty=1*10^5',linewidth=1.0,markersize=10)
plt.plot(Data_g03_stent[:,2]*1000.0,Data_g03_stent[:,3]*1000.0,'g',label='Penalty=1*10^9',linewidth=1.0,markersize=10)
plt.text(0.5,0.05,r'Axial position [mm]', {'color': 'k', 'fontsize': 6},
ha='center',va='center',clip_on=False,transform=plt.gca().transAxes)
plt.text(0.05, 0.5, r'Radius [mm]',{'color': 'k', 'fontsize': 6,},
ha='left',va='center',rotation=90,clip_on=False,transform=plt.gca().transAxes)
plt.text(0.95, 0.95, r'i',{'color': 'k', 'fontsize': 6,
'bbox': dict(boxstyle="round", fc="w", ec="k", pad=0.2)},
ha='right',va='top',transform=plt.gca().transAxes)
plt.axis([0,100,10,13])
plt.subplot(4,3,10)
plt.plot(Data_g01_gnrb[:,2]*1000.0,Data_g01_gnrb[:,4]/1000.0,'b',label='Penalty=1*10^7',linewidth=1.0,markersize=10)
plt.plot(Data_g02_gnrb[:,2]*1000.0,Data_g02_gnrb[:,4]/1000.0,'r',label='Penalty=1*10^5',linewidth=1.0,markersize=10)
plt.plot(Data_g03_gnrb[:,2]*1000.0,Data_g03_gnrb[:,4]/1000.0,'g',label='Penalty=1*10^9',linewidth=1.0,markersize=10)
plt.text(0.5,0.05,r'Axial position [mm]', {'color': 'k', 'fontsize': 6},
ha='center',va='center',clip_on=False,transform=plt.gca().transAxes)
plt.text(0.05, 0.5, r'Axial Stress [kPa]',{'color': 'k', 'fontsize': 6,},
ha='left',va='center',rotation=90,clip_on=False,transform=plt.gca().transAxes)
plt.text(0.95, 0.95, r'j',{'color': 'k', 'fontsize': 6,
'bbox': dict(boxstyle="round", fc="w", ec="k", pad=0.2)},
ha='right',va='top',transform=plt.gca().transAxes)
plt.axis([0,100,0,150])
plt.subplot(4,3,11)
plt.plot(Data_g01_gnrb[:,2]*1000.0,Data_g01_gnrb[:,5]/1000.0,'b',label='Penalty=1*10^7',linewidth=1.0,markersize=10)
plt.plot(Data_g02_gnrb[:,2]*1000.0,Data_g02_gnrb[:,5]/1000.0,'r',label='Penalty=1*10^5',linewidth=1.0,markersize=10)
plt.plot(Data_g03_gnrb[:,2]*1000.0,Data_g03_gnrb[:,5]/1000.0,'g',label='Penalty=1*10^9',linewidth=1.0,markersize=10)
plt.text(0.5,0.05,r'Axial position [mm]', {'color': 'k', 'fontsize': 6},
ha='center',va='center',clip_on=False,transform=plt.gca().transAxes)
plt.text(0.05, 0.5, r'Circumferential Stress [kPa]',{'color': 'k', 'fontsize': 6,},
ha='left',va='center',rotation=90,clip_on=False,transform=plt.gca().transAxes)
plt.text(0.95, 0.95, r'k',{'color': 'k', 'fontsize': 6,
'bbox': dict(boxstyle="round", fc="w", ec="k", pad=0.2)},
ha='right',va='top',transform=plt.gca().transAxes)
plt.axis([0,100,0,350])
plt.subplot(4,3,12)
plt.plot(Data_g01_gnrb[:,2]*1000.0,Data_g01_gnrb[:,3]*1000.0,'b',label='Penalty=1*10^7',linewidth=1.0,markersize=10)
plt.plot(Data_g02_gnrb[:,2]*1000.0,Data_g02_gnrb[:,3]*1000.0,'r',label='Penalty=1*10^5',linewidth=1.0,markersize=10)
plt.plot(Data_g03_gnrb[:,2]*1000.0,Data_g03_gnrb[:,3]*1000.0,'g',label='Penalty=1*10^9',linewidth=1.0,markersize=10)
plt.text(0.5,0.05,r'Axial position [mm]', {'color': 'k', 'fontsize': 6},
ha='center',va='center',clip_on=False,transform=plt.gca().transAxes)
plt.text(0.05, 0.5, r'Radius [mm]',{'color': 'k', 'fontsize': 6,},
ha='left',va='center',rotation=90,clip_on=False,transform=plt.gca().transAxes)
plt.text(0.95, 0.95, r'l',{'color': 'k', 'fontsize': 6,
'bbox': dict(boxstyle="round", fc="w", ec="k", pad=0.2)},
ha='right',va='top',transform=plt.gca().transAxes)
plt.axis([0,100,10,13])
fig.tight_layout()
plt.show
FIGURENAME = 'penalty.eps'
plt.savefig(FIGURENAME)
plt.savefig(fname=FIGURENAME,
dpi=None,
facecolor='w',
edgecolor='w',
orientation='portrait',
format=None,
transparent=False,
bbox_inches=None,
pad_inches=0.1,
frameon=None,
metadata=None)
plt.close('all')
"""
#--------------------------------------------------------------------------
radii = (Data_g02[-1,3]*1000.0, Data_g01[-1,3]*1000.0, Data_g03[-1,3]*1000.0)
fig, ax = plt.subplots()
index = np.arange(3)
bar_width = 0.45
opacity = 0.4
error_config = {'ecolor': '0.3'}
rects1 = ax.bar(index, radii, bar_width,
alpha=opacity, color='b',
error_kw=error_config, label='Penalty')
ax.set_xlabel('Penalty')
ax.set_ylabel('Radius [mm]')
ax.set_xticks(index + bar_width / 2)
ax.set_xticklabels(('1e5', '1e7', '1e9'))
plt.axis([-0.25,2.7,0,20])
fig.tight_layout()
plt.show
FIGURENAME = 'sensitivity_penalty.eps'
plt.savefig(FIGURENAME)
plt.savefig(fname=FIGURENAME,
dpi=None,
facecolor='w',
edgecolor='w',
orientation='portrait',
format=None,
transparent=False,
bbox_inches=None,
pad_inches=0.1,
frameon=None,
metadata=None)
plt.close('all')
"""
#-------------------------------------------------------------------------- | for idx in index_Time_g03.index.values:
index_start = idx + 1
index_end = index_start + NbrOfNodes
|
dist_model.py |
from __future__ import absolute_import
import sys
import numpy as np
import torch
from torch import nn
import os
from collections import OrderedDict
from torch.autograd import Variable
import itertools
from .base_model import BaseModel
from scipy.ndimage import zoom
import fractions
import functools
import skimage.transform
from tqdm import tqdm
from . import networks_basic as networks
from . import perceptual_loss
class DistModel(BaseModel):
def name(self):
return self.model_name
def initialize(self, model='net-lin', net='alex', colorspace='Lab', pnet_rand=False, pnet_tune=False, model_path=None,
use_gpu=True, printNet=False, spatial=False,
is_train=False, lr=.0001, beta1=0.5, version='0.1', gpu_ids=[0]):
'''
INPUTS
model - ['net-lin'] for linearly calibrated network
['net'] for off-the-shelf network
['L2'] for L2 distance in Lab colorspace
['SSIM'] for ssim in RGB colorspace
net - ['squeeze','alex','vgg']
model_path - if None, will look in weights/[NET_NAME].pth
colorspace - ['Lab','RGB'] colorspace to use for L2 and SSIM
use_gpu - bool - whether or not to use a GPU
printNet - bool - whether or not to print network architecture out
spatial - bool - whether to output an array containing varying distances across spatial dimensions
is_train - bool - [True] for training mode
lr - float - initial learning rate
beta1 - float - initial momentum term for adam
version - 0.1 for latest, 0.0 was original (with a bug)
gpu_ids - int array - [0] by default, gpus to use
'''
BaseModel.initialize(self, use_gpu=use_gpu, gpu_ids=gpu_ids)
self.model = model
self.net = net
self.is_train = is_train
self.spatial = spatial
self.gpu_ids = gpu_ids
self.model_name = '%s [%s]'%(model,net)
if(self.model == 'net-lin'): # pretrained net + linear layer
self.net = networks.PNetLin(pnet_rand=pnet_rand, pnet_tune=pnet_tune, pnet_type=net,
use_dropout=True, spatial=spatial, version=version, lpips=True)
kw = {}
if not use_gpu:
kw['map_location'] = 'cpu'
if(model_path is None):
import inspect
model_path = os.path.abspath(os.path.join(inspect.getfile(self.initialize), '..', 'weights/v%s/%s.pth'%(version,net)))
if(not is_train):
print('Loading model from: %s'%model_path)
self.net.load_state_dict(torch.load(model_path, **kw), strict=False)
elif(self.model=='net'): # pretrained network
self.net = networks.PNetLin(pnet_rand=pnet_rand, pnet_type=net, lpips=False)
elif(self.model in ['L2','l2']):
self.net = networks.L2(use_gpu=use_gpu,colorspace=colorspace) # not really a network, only for testing
self.model_name = 'L2'
elif(self.model in ['DSSIM','dssim','SSIM','ssim']):
self.net = networks.DSSIM(use_gpu=use_gpu,colorspace=colorspace)
self.model_name = 'SSIM'
else:
raise ValueError("Model [%s] not recognized." % self.model)
self.parameters = list(self.net.parameters())
if self.is_train: # training mode
# extra network on top to go from distances (d0,d1) => predicted human judgment (h*)
self.rankLoss = networks.BCERankingLoss()
self.parameters += list(self.rankLoss.net.parameters())
self.lr = lr
self.old_lr = lr
self.optimizer_net = torch.optim.Adam(self.parameters, lr=lr, betas=(beta1, 0.999))
else: # test mode
self.net.eval()
if(use_gpu):
self.net.to(gpu_ids[0])
self.net = torch.nn.DataParallel(self.net, device_ids=gpu_ids)
if(self.is_train):
self.rankLoss = self.rankLoss.to(device=gpu_ids[0]) # just put this on GPU0
if(printNet):
print('---------- Networks initialized -------------')
networks.print_network(self.net)
print('-----------------------------------------------')
def forward(self, in0, in1, retPerLayer=False):
''' Function computes the distance between image patches in0 and in1
INPUTS
in0, in1 - torch.Tensor object of shape Nx3xXxY - image patch scaled to [-1,1]
OUTPUT
computed distances between in0 and in1
'''
return self.net.forward(in0, in1, retPerLayer=retPerLayer)
# ***** TRAINING FUNCTIONS *****
def optimize_parameters(self):
self.forward_train()
self.optimizer_net.zero_grad()
self.backward_train()
self.optimizer_net.step()
self.clamp_weights()
def clamp_weights(self):
for module in self.net.modules():
if(hasattr(module, 'weight') and module.kernel_size==(1,1)):
module.weight.data = torch.clamp(module.weight.data,min=0)
def set_input(self, data):
|
def forward_train(self): # run forward pass
# print(self.net.module.scaling_layer.shift)
# print(torch.norm(self.net.module.net.slice1[0].weight).item(), torch.norm(self.net.module.lin0.model[1].weight).item())
self.d0 = self.forward(self.var_ref, self.var_p0)
self.d1 = self.forward(self.var_ref, self.var_p1)
self.acc_r = self.compute_accuracy(self.d0,self.d1,self.input_judge)
self.var_judge = Variable(1.*self.input_judge).view(self.d0.size())
self.loss_total = self.rankLoss.forward(self.d0, self.d1, self.var_judge*2.-1.)
return self.loss_total
def backward_train(self):
torch.mean(self.loss_total).backward()
def compute_accuracy(self,d0,d1,judge):
''' d0, d1 are Variables, judge is a Tensor '''
d1_lt_d0 = (d1<d0).cpu().data.numpy().flatten()
judge_per = judge.cpu().numpy().flatten()
return d1_lt_d0*judge_per + (1-d1_lt_d0)*(1-judge_per)
def get_current_errors(self):
retDict = OrderedDict([('loss_total', self.loss_total.data.cpu().numpy()),
('acc_r', self.acc_r)])
for key in retDict.keys():
retDict[key] = np.mean(retDict[key])
return retDict
def get_current_visuals(self):
zoom_factor = 256/self.var_ref.data.size()[2]
ref_img = util.tensor2im(self.var_ref.data)
p0_img = util.tensor2im(self.var_p0.data)
p1_img = util.tensor2im(self.var_p1.data)
ref_img_vis = zoom(ref_img,[zoom_factor, zoom_factor, 1],order=0)
p0_img_vis = zoom(p0_img,[zoom_factor, zoom_factor, 1],order=0)
p1_img_vis = zoom(p1_img,[zoom_factor, zoom_factor, 1],order=0)
return OrderedDict([('ref', ref_img_vis),
('p0', p0_img_vis),
('p1', p1_img_vis)])
def save(self, path, label):
if(self.use_gpu):
self.save_network(self.net.module, path, '', label)
else:
self.save_network(self.net, path, '', label)
self.save_network(self.rankLoss.net, path, 'rank', label)
def update_learning_rate(self,nepoch_decay):
lrd = self.lr / nepoch_decay
lr = self.old_lr - lrd
for param_group in self.optimizer_net.param_groups:
param_group['lr'] = lr
print('update lr [%s] decay: %f -> %f' % (type,self.old_lr, lr))
self.old_lr = lr
def score_2afc_dataset(data_loader, func, name=''):
''' Function computes Two Alternative Forced Choice (2AFC) score using
distance function 'func' in dataset 'data_loader'
INPUTS
data_loader - CustomDatasetDataLoader object - contains a TwoAFCDataset inside
func - callable distance function - calling d=func(in0,in1) should take 2
pytorch tensors with shape Nx3xXxY, and return numpy array of length N
OUTPUTS
[0] - 2AFC score in [0,1], fraction of time func agrees with human evaluators
[1] - dictionary with following elements
d0s,d1s - N arrays containing distances between reference patch to perturbed patches
gts - N array in [0,1], preferred patch selected by human evaluators
(closer to "0" for left patch p0, "1" for right patch p1,
"0.6" means 60pct people preferred right patch, 40pct preferred left)
scores - N array in [0,1], corresponding to what percentage function agreed with humans
CONSTS
N - number of test triplets in data_loader
'''
d0s = []
d1s = []
gts = []
for data in tqdm(data_loader.load_data(), desc=name):
d0s+=func(data['ref'],data['p0']).data.cpu().numpy().flatten().tolist()
d1s+=func(data['ref'],data['p1']).data.cpu().numpy().flatten().tolist()
gts+=data['judge'].cpu().numpy().flatten().tolist()
d0s = np.array(d0s)
d1s = np.array(d1s)
gts = np.array(gts)
scores = (d0s<d1s)*(1.-gts) + (d1s<d0s)*gts + (d1s==d0s)*.5
return(np.mean(scores), dict(d0s=d0s,d1s=d1s,gts=gts,scores=scores))
def score_jnd_dataset(data_loader, func, name=''):
''' Function computes JND score using distance function 'func' in dataset 'data_loader'
INPUTS
data_loader - CustomDatasetDataLoader object - contains a JNDDataset inside
func - callable distance function - calling d=func(in0,in1) should take 2
pytorch tensors with shape Nx3xXxY, and return pytorch array of length N
OUTPUTS
[0] - JND score in [0,1], mAP score (area under precision-recall curve)
[1] - dictionary with following elements
ds - N array containing distances between two patches shown to human evaluator
sames - N array containing fraction of people who thought the two patches were identical
CONSTS
N - number of test triplets in data_loader
'''
ds = []
gts = []
for data in tqdm(data_loader.load_data(), desc=name):
ds+=func(data['p0'],data['p1']).data.cpu().numpy().tolist()
gts+=data['same'].cpu().numpy().flatten().tolist()
sames = np.array(gts)
ds = np.array(ds)
sorted_inds = np.argsort(ds)
ds_sorted = ds[sorted_inds]
sames_sorted = sames[sorted_inds]
TPs = np.cumsum(sames_sorted)
FPs = np.cumsum(1-sames_sorted)
FNs = np.sum(sames_sorted)-TPs
precs = TPs/(TPs+FPs)
recs = TPs/(TPs+FNs)
score = util.voc_ap(recs,precs)
return(score, dict(ds=ds,sames=sames))
| self.input_ref = data['ref']
self.input_p0 = data['p0']
self.input_p1 = data['p1']
self.input_judge = data['judge']
if(self.use_gpu):
self.input_ref = self.input_ref.to(device=self.gpu_ids[0])
self.input_p0 = self.input_p0.to(device=self.gpu_ids[0])
self.input_p1 = self.input_p1.to(device=self.gpu_ids[0])
self.input_judge = self.input_judge.to(device=self.gpu_ids[0])
self.var_ref = Variable(self.input_ref,requires_grad=True)
self.var_p0 = Variable(self.input_p0,requires_grad=True)
self.var_p1 = Variable(self.input_p1,requires_grad=True) |
distance.go | // Copyright ©2015 The Gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package network
import (
"math"
"github.com/neoyagami/gonum/graph"
"github.com/neoyagami/gonum/graph/path"
)
// Closeness returns the closeness centrality for nodes in the graph g used to
// construct the given shortest paths.
//
// C(v) = 1 / \sum_u d(u,v)
//
// For directed graphs the incoming paths are used. Infinite distances are
// not considered.
func Closeness(g graph.Graph, p path.AllShortest) map[int64]float64 {
nodes := graph.NodesOf(g.Nodes())
c := make(map[int64]float64, len(nodes))
for _, u := range nodes {
uid := u.ID()
var sum float64
for _, v := range nodes {
vid := v.ID()
// The ordering here is not relevant for
// undirected graphs, but we make sure we
// are counting incoming paths.
d := p.Weight(vid, uid)
if math.IsInf(d, 0) {
continue
}
sum += d
}
c[u.ID()] = 1 / sum
}
return c
}
// Farness returns the farness for nodes in the graph g used to construct
// the given shortest paths.
//
// F(v) = \sum_u d(u,v)
//
// For directed graphs the incoming paths are used. Infinite distances are
// not considered.
func Farness(g graph.Graph, p path.AllShortest) map[int64]float64 {
nodes := graph.NodesOf(g.Nodes())
f := make(map[int64]float64, len(nodes))
for _, u := range nodes {
uid := u.ID()
var sum float64
for _, v := range nodes {
vid := v.ID()
// The ordering here is not relevant for
// undirected graphs, but we make sure we
// are counting incoming paths.
d := p.Weight(vid, uid)
if math.IsInf(d, 0) {
continue
}
sum += d
}
f[u.ID()] = sum
}
return f
}
// Harmonic returns the harmonic centrality for nodes in the graph g used to
// construct the given shortest paths.
//
// H(v)= \sum_{u ≠ v} 1 / d(u,v)
//
// For directed graphs the incoming paths are used. Infinite distances are
// not considered.
func Harmonic(g graph.Graph, p path.AllShortest) map[int64]float64 {
nodes := graph.NodesOf(g.Nodes())
h := make(map[int64]float64, len(nodes))
for i, u := range nodes {
uid := u.ID()
var sum float64
for j, v := range nodes {
vid := v.ID()
// The ordering here is not relevant for
// undirected graphs, but we make sure we
// are counting incoming paths.
d := p.Weight(vid, uid)
if math.IsInf(d, 0) {
continue
}
if i != j {
sum += 1 / d
}
}
h[u.ID()] = sum
}
return h
}
// Residual returns the Dangalchev's residual closeness for nodes in the graph
// g used to construct the given shortest paths.
//
// C(v)= \sum_{u ≠ v} 1 / 2^d(u,v)
//
// For directed graphs the incoming paths are used. Infinite distances are
// not considered.
func Residual(g graph.Graph, p path.AllShortest) map[int64]float64 {
nodes := graph.NodesOf(g.Nodes())
r := make(map[int64]float64, len(nodes))
for i, u := range nodes {
uid := u.ID()
var sum float64
for j, v := range nodes {
vid := v.ID()
// The ordering here is not relevant for
// undirected graphs, but we make sure we
// are counting incoming paths.
d := p.Weight(vid, uid)
if math.IsInf(d, 0) {
continue
}
if i != j {
| r[u.ID()] = sum
}
return r
}
| sum += math.Exp2(-d)
}
}
|
tree_drag_source.rs | // Take a look at the license at the top of the repository in the LICENSE file.
use crate::{TreeDragSource, TreePath};
use glib::subclass::prelude::*;
use glib::translate::*;
use glib::Cast;
pub trait TreeDragSourceImpl: ObjectImpl {
fn row_draggable(&self, tree_drag_source: &Self::Type, path: &TreePath) -> bool {
self.parent_row_draggable(tree_drag_source, path)
}
fn drag_data_get(&self, tree_drag_source: &Self::Type, path: &TreePath)
-> gdk::ContentProvider;
fn drag_data_delete(&self, tree_drag_source: &Self::Type, path: &TreePath) -> bool;
}
pub trait TreeDragSourceImplExt: ObjectSubclass {
fn parent_row_draggable(&self, _tree_drag_source: &Self::Type, _path: &TreePath) -> bool;
fn parent_drag_data_get(
&self,
tree_drag_source: &Self::Type,
path: &TreePath,
) -> gdk::ContentProvider;
fn parent_drag_data_delete(&self, tree_drag_source: &Self::Type, path: &TreePath) -> bool;
}
impl<T: TreeDragSourceImpl> TreeDragSourceImplExt for T {
fn parent_row_draggable(&self, tree_drag_source: &Self::Type, path: &TreePath) -> bool {
unsafe {
let type_data = Self::type_data();
let parent_iface = type_data.as_ref().get_parent_interface::<TreeDragSource>()
as *const ffi::GtkTreeDragSourceIface;
if let Some(func) = (*parent_iface).row_draggable {
from_glib(func(
tree_drag_source
.unsafe_cast_ref::<TreeDragSource>()
.to_glib_none()
.0,
mut_override(path.to_glib_none().0),
))
} else {
// Assume the row is draggable by default
true
}
}
}
fn parent_drag_data_get(
&self,
tree_drag_source: &Self::Type,
path: &TreePath,
) -> gdk::ContentProvider {
unsafe {
let type_data = Self::type_data();
let parent_iface = type_data.as_ref().get_parent_interface::<TreeDragSource>()
as *const ffi::GtkTreeDragSourceIface;
let func = (*parent_iface)
.drag_data_get
.expect("no parent \"drag_data_get\" implementation");
from_glib_full(func(
tree_drag_source
.unsafe_cast_ref::<TreeDragSource>()
.to_glib_none()
.0,
mut_override(path.to_glib_none().0),
))
}
}
fn parent_drag_data_delete(&self, tree_drag_source: &Self::Type, path: &TreePath) -> bool {
unsafe {
let type_data = Self::type_data();
let parent_iface = type_data.as_ref().get_parent_interface::<TreeDragSource>()
as *const ffi::GtkTreeDragSourceIface;
let func = (*parent_iface)
.drag_data_delete
.expect("no parent \"drag_data_delete\" implementation");
from_glib(func(
tree_drag_source
.unsafe_cast_ref::<TreeDragSource>()
.to_glib_none()
.0,
mut_override(path.to_glib_none().0),
))
}
}
}
unsafe impl<T: TreeDragSourceImpl> IsImplementable<T> for TreeDragSource {
fn | (iface: &mut glib::Interface<Self>) {
let iface = iface.as_mut();
iface.row_draggable = Some(tree_drag_source_row_draggable::<T>);
iface.drag_data_get = Some(tree_drag_source_drag_data_get::<T>);
iface.drag_data_delete = Some(tree_drag_source_drag_data_delete::<T>);
}
fn instance_init(_instance: &mut glib::subclass::InitializingObject<T>) {}
}
unsafe extern "C" fn tree_drag_source_row_draggable<T: TreeDragSourceImpl>(
tree_drag_source: *mut ffi::GtkTreeDragSource,
pathptr: *mut ffi::GtkTreePath,
) -> glib::ffi::gboolean {
let instance = &*(tree_drag_source as *mut T::Instance);
let imp = instance.get_impl();
let path: Borrowed<TreePath> = from_glib_borrow(pathptr);
imp.row_draggable(
from_glib_borrow::<_, TreeDragSource>(tree_drag_source).unsafe_cast_ref(),
&path,
)
.to_glib()
}
unsafe extern "C" fn tree_drag_source_drag_data_get<T: TreeDragSourceImpl>(
tree_drag_source: *mut ffi::GtkTreeDragSource,
pathptr: *mut ffi::GtkTreePath,
) -> *mut gdk::ffi::GdkContentProvider {
let instance = &*(tree_drag_source as *mut T::Instance);
let imp = instance.get_impl();
let path: Borrowed<TreePath> = from_glib_borrow(pathptr);
imp.drag_data_get(
from_glib_borrow::<_, TreeDragSource>(tree_drag_source).unsafe_cast_ref(),
&path,
)
.to_glib_full()
}
unsafe extern "C" fn tree_drag_source_drag_data_delete<T: TreeDragSourceImpl>(
tree_drag_source: *mut ffi::GtkTreeDragSource,
pathptr: *mut ffi::GtkTreePath,
) -> glib::ffi::gboolean {
let instance = &*(tree_drag_source as *mut T::Instance);
let imp = instance.get_impl();
let path: Borrowed<TreePath> = from_glib_borrow(pathptr);
imp.drag_data_delete(
from_glib_borrow::<_, TreeDragSource>(tree_drag_source).unsafe_cast_ref(),
&path,
)
.to_glib()
}
| interface_init |
indices_test.go | package contextionary
import (
"testing"
"io/ioutil"
"os"
"fmt"
"github.com/creativesoftwarefdn/weaviate/contextionary/generator"
)
// Test data
// Changing this data might invalidate the nearest neighbours test!
var vectorTests = []struct {
word string
vec []float32
}{
{ "apple", []float32{1, 0, 0} },
{ "pie", []float32{0, 1, 0} },
{ "computer", []float32{0, 0, 1} },
{ "fruit", []float32{0.8, 0, 0} },
{ "company", []float32{0, 0, 2} },
}
func TestMMappedIndex(t *testing.T) {
tempdir, err := ioutil.TempDir("", "weaviate-vector-test")
if err != nil {
t.Errorf("Could not create temporary directory, %v", err)
}
defer os.RemoveAll(tempdir)
// First generate the csv input fileformat based on the test data.
var dataset = ""
for i := 0; i < len(vectorTests); i++ {
vt := vectorTests[i]
dataset += vt.word + " "
for j := 0; j < len(vt.vec) - 1; j++ {
dataset += fmt.Sprintf("%f ", vt.vec[j])
}
dataset += fmt.Sprintf("%f\n", vt.vec[len(vt.vec)-1])
}
err = ioutil.WriteFile(tempdir + "/glove.txt", []byte(dataset), 0644)
if err != nil {
t.Errorf("Could not create input file: %v", err)
}
t.Run("Generating index", func(t *testing.T) {
// Now build an index based on this
var gen_opts generator.Options
gen_opts.VectorCSVPath = tempdir + "/glove.txt"
gen_opts.TempDBPath = tempdir + "/tempdb"
gen_opts.OutputPrefix = tempdir + "/glove"
gen_opts.K = 3
generator.Generate(gen_opts)
})
// And load the index.
vi, err := LoadVectorFromDisk(tempdir + "/glove.knn", tempdir + "/glove.idx")
if err != nil {
t.Errorf("Could not load vectors from disk: %v", err)
}
shared_tests(t, vi)
}
func TestInMemoryIndex(t *testing.T) {
builder := InMemoryBuilder(3)
for i := 0; i < len(vectorTests); i ++ {
v := vectorTests[i]
builder.AddWord(v.word, NewVector(v.vec))
}
memory_index := Contextionary(builder.Build(3))
shared_tests(t, &memory_index)
}
func TestCombinedIndex(t *testing.T) {
builder1 := InMemoryBuilder(3)
builder2 := InMemoryBuilder(3)
split := 3
for i := 0; i < split; i ++ {
v := vectorTests[i]
builder1.AddWord(v.word, NewVector(v.vec))
}
for i := split; i < len(vectorTests); i ++ {
v := vectorTests[i]
builder2.AddWord(v.word, NewVector(v.vec))
}
memory_index1 := Contextionary(builder1.Build(3))
memory_index2 := Contextionary(builder2.Build(3))
var indices12 []Contextionary = []Contextionary { memory_index1, memory_index2, }
var indices21 []Contextionary = []Contextionary { memory_index2, memory_index1, }
t.Run("indices 1,2", func(t *testing.T) { test_combined(t, indices12) })
t.Run("indices 2,1", func(t *testing.T) { test_combined(t, indices21) })
}
func test_combined(t *testing.T, indices []Contextionary) {
combined_index, err := CombineVectorIndices(indices)
if err != nil {
t.Errorf("Combining failed")
t.FailNow()
}
err = combined_index.VerifyDisjoint()
if err != nil {
t.Errorf("Not disjoint; %v", err)
t.FailNow()
}
vi := Contextionary(combined_index)
shared_tests(t, &vi)
}
func shared_tests(t *testing.T, vi *Contextionary) {
t.Run("Number of elements is correct", func (t *testing.T) {
expected := 5
found := (*vi).GetNumberOfItems()
if found != expected {
t.Errorf("Expected to have %v items, but found %v", expected, found)
}
})
t.Run("Iterate over all items", func (t *testing.T) {
// Iterate over all items. Check index -> word, and lookup word -> index
length := ItemIndex((*vi).GetNumberOfItems())
for i := ItemIndex(0); i < length; i++ {
word, err := (*vi).ItemIndexToWord(ItemIndex(i))
if err != nil {
t.Errorf("Could not get item of index %+v, because: %+v", i, err)
}
i2 := (*vi).WordToItemIndex(word)
if i2 != i {
t.Errorf("Index -> Word -> Index failed!. i=%v, w=%v i2=%v", i, word, i2)
}
}
})
t.Run("Check that feature vectors are stored properly", func (t *testing.T) {
for i := 0; i < len(vectorTests); i++ {
vt := vectorTests[i]
word_index := (*vi).WordToItemIndex(vt.word)
if !word_index.IsPresent() {
t.Errorf("Could not find word %v", vt.word)
}
// Get back the feature vectors.
vector, err := (*vi).GetVectorForItemIndex(word_index)
if err != nil {
t.Errorf("Could not get vector")
}
if vector == nil {
t.Errorf("Vector missing!")
t.FailNow()
}
// and check that it's correct
vtvec := NewVector(vt.vec)
areEqual, err := vector.Equal(&vtvec)
if err != nil {
t.Errorf("Could not compare the two vectors: %v", err)
}
if !areEqual {
t.Errorf("Feature vector %v incorrect (word: %v). Expected %v, got %v", i, vt.word, vt.vec, vector.vector)
}
}
})
t.Run("Test that the distances between all pairs of test data is correct", func (t *testing.T) {
for i := 0; i < len(vectorTests); i ++ {
for j := 0; j < len(vectorTests); j ++ {
vt_a := vectorTests[i]
vt_b := vectorTests[j]
vt_a_vec := NewVector(vt_a.vec)
vt_b_vec := NewVector(vt_b.vec)
wi_a := (*vi).WordToItemIndex(vt_a.word)
wi_b := (*vi).WordToItemIndex(vt_b.word)
annoy_dist, err := (*vi).GetDistance(wi_a, wi_b)
if err != nil {
t.Errorf("Could not compute distance")
}
simple_dist, err := vt_a_vec.Distance(&vt_b_vec)
if err != nil { panic("should be same length") }
if !equal_float_epsilon(annoy_dist, simple_dist, 0.00003) {
t.Errorf("Distance between %v and %v incorrect; %v (annoy) vs %v (test impl)", vt_a.word, vt_b.word, annoy_dist, simple_dist)
}
}
}
})
t.Run("Test nearest neighbours apple & fruit", func (t *testing.T) {
apple_idx := (*vi).WordToItemIndex("apple")
fruit_idx := (*vi).WordToItemIndex("fruit")
res, distances, err := (*vi).GetNnsByItem(fruit_idx, 2, 3)
t.Logf("%v, %v, %v\n", res, distances, err)
if err != nil {
t.Errorf("GetNNs failed!")
}
if len(res) != 2 {
t.Errorf("Wrong number of items returned; got %v expected 2", len(res))
}
// res[0] will be fruit itself.
if res[0] != fruit_idx {
closest_to, _ := (*vi).ItemIndexToWord(res[0])
t.Errorf("closest element should be itself, fruit, but was '%v'. all results:\n%v", closest_to, debug_print_items(vi, res, distances))
}
if res[1] != apple_idx {
closest_to, _ := (*vi).ItemIndexToWord(res[1])
t.Errorf("apple should be closest to apple, but was '%v'. all results:\n%v", closest_to, debug_print_items(vi, res, distances))
}
if !equal_float_epsilon(distances[1], 0.2, 0.0002) {
t.Errorf("Wrong distances!, got %v", distances[1])
}
})
t.Run("Test nearest neighbours computer & company", func (t *testing.T) {
company_idx := (*vi).WordToItemIndex("company")
computer_idx := (*vi).WordToItemIndex("computer")
res, distances, err := (*vi).GetNnsByItem(company_idx, 2, 3)
if err != nil {
t.Errorf("GetNNs failed!")
}
if len(res) != 2 {
t.Errorf("Wrong number of items returned; got %v expected 2", len(res))
t.FailNow()
}
// res[0] will be company itself.
if res[1] != computer_idx {
t.Errorf("computer should be closest to company!")
}
if !equal_float_epsilon(distances[1], 1, 0.0002) {
t.Errorf("Wrong distances!, got %v", distances[1])
}
})
t.Run("Test k-nearest from vector", func (t *testing.T) {
var apple_pie = NewVector(/* centroid of apple and pie */ []float32{0.5, 0.5,0})
fruit_idx := (*vi).WordToItemIndex("fruit")
apple_idx := (*vi).WordToItemIndex("apple")
pie_idx := (*vi).WordToItemIndex("pie")
res, distances, err := (*vi).GetNnsByVector(apple_pie, 3, 3)
if err != nil {
t.Errorf("GetNNs failed: %v", err)
t.FailNow()
}
if len(res) != 3 {
t.Errorf("Wrong number of items returned; got %v expected 3", len(res))
t.FailNow()
}
if res[0] != fruit_idx {
closest_to, _ := (*vi).ItemIndexToWord(res[1])
t.Errorf("fruit should be closest to fruit!, but was '%v'", closest_to)
t.Errorf("got results: %+v", res)
for _, i := range(res) {
word, err := (*vi).ItemIndexToWord(i)
t.Errorf("got word: %v (err: %v)", word, err)
}
}
if res[1] != apple_idx {
closest_to, _ := (*vi).ItemIndexToWord(res[1])
t.Errorf("apple should be 2nd closest to apple!, but was '%v'", closest_to)
}
if res[2] != pie_idx {
closest_to, _ := (*vi).ItemIndexToWord(res[2])
t.Errorf("pie should be 3rd closest to pie!, but was '%v'", closest_to)
}
v_fruit, err := (*vi).GetVectorForItemIndex(fruit_idx);
if err != nil { t.Errorf("could not fetch fruit vector"); return }
v_apple, err := (*vi).GetVectorForItemIndex(apple_idx);
if err != nil { panic("could not fetch apple vector") }
v_pie, err := (*vi).GetVectorForItemIndex(pie_idx);
if err != nil { panic("could not fetch pie vector") }
distance_to_fruit, err := apple_pie.Distance(v_fruit)
if err != nil { panic("should be same length") }
if !equal_float_epsilon(distances[0], distance_to_fruit, 0.0001) {
t.Errorf("Wrong distance for fruit, expect %v, got %v", distance_to_fruit, distances[0])
}
distance_to_apple, err := apple_pie.Distance(v_apple)
if err != nil { panic("should be same length") }
if !equal_float_epsilon(distances[1], distance_to_apple, 0.0001) {
t.Errorf("Wrong distance for apple, got %v", distances[1])
}
distance_to_pie, err := apple_pie.Distance(v_pie)
if err != nil { panic("should be same size") }
if !equal_float_epsilon(distances[2], distance_to_pie, 0.0001) {
t.Errorf("Wrong distance for pie, expected %v, got %v", distance_to_pie, distances[2])
}
});
}
func equal_float_epsilon(a float32, b float32, epsilon float32) bool |
func debug_print_items(vi *Contextionary, items []ItemIndex, distances []float32) string {
result := ""
for i, item := range(items) {
w, _ := (*vi).ItemIndexToWord(item)
result += fmt.Sprintf("%v: %v (%v)\n", item, w, distances[i])
}
return result
}
| {
var min, max float32
if a < b {
min = a
max = b
} else {
min = b
max = a
}
return max <= (min + epsilon)
} |
doc.go | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by protoc-gen-go_gapic. DO NOT EDIT.
//
// NOTE: This package is in beta. It is not stable, and may be subject to changes.
//
// Use of Context
//
// The ctx passed to NewClient is used for authentication requests and
// for creating the underlying connection, but is not used for subsequent calls.
// Individual methods on the client use the ctx given to them.
//
// To close the open connection, use the Close() method.
//
// For information about setting deadlines, reusing contexts, and more
// please visit pkg.go.dev/cloud.google.com/go.
package dashboard // import "cloud.google.com/go/monitoring/dashboard/apiv1"
import (
"context"
"os"
"runtime"
"strconv"
"strings"
"unicode"
"google.golang.org/api/option"
"google.golang.org/grpc/metadata"
)
// For more information on implementing a client constructor hook, see
// https://github.com/googleapis/google-cloud-go/wiki/Customizing-constructors.
type clientHookParams struct{}
type clientHook func(context.Context, clientHookParams) ([]option.ClientOption, error)
const versionClient = "20200925"
func insertMetadata(ctx context.Context, mds ...metadata.MD) context.Context {
out, _ := metadata.FromOutgoingContext(ctx)
out = out.Copy()
for _, md := range mds {
for k, v := range md {
out[k] = append(out[k], v...)
}
}
return metadata.NewOutgoingContext(ctx, out)
}
func checkDisableDeadlines() (bool, error) {
raw, ok := os.LookupEnv("GOOGLE_API_GO_EXPERIMENTAL_DISABLE_DEFAULT_DEADLINE")
if !ok {
return false, nil
}
b, err := strconv.ParseBool(raw)
return b, err
}
// DefaultAuthScopes reports the default set of authentication scopes to use with this package.
func DefaultAuthScopes() []string {
return []string{
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/monitoring",
"https://www.googleapis.com/auth/monitoring.read",
"https://www.googleapis.com/auth/monitoring.write",
}
}
// versionGo returns the Go runtime version. The returned string
// has no whitespace, suitable for reporting in header.
func versionGo() string {
const develPrefix = "devel +"
s := runtime.Version()
if strings.HasPrefix(s, develPrefix) {
s = s[len(develPrefix):]
if p := strings.IndexFunc(s, unicode.IsSpace); p >= 0 {
s = s[:p]
}
return s
}
notSemverRune := func(r rune) bool {
return !strings.ContainsRune("0123456789.", r)
}
if strings.HasPrefix(s, "go1") {
s = s[2:]
var prerelease string
if p := strings.IndexFunc(s, notSemverRune); p >= 0 {
s, prerelease = s[:p], s[p:]
}
if strings.HasSuffix(s, ".") | else if strings.Count(s, ".") < 2 {
s += ".0"
}
if prerelease != "" {
s += "-" + prerelease
}
return s
}
return "UNKNOWN"
}
| {
s += "0"
} |
aws.go | /*
Copyright 2018 Turbine Labs, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package aws provides integrations with Amazon EC2 and ECS. See
// "rotor help aws" and "rotor help ecs" for usage.
package aws
import (
"errors"
"fmt" | "github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/ec2"
"github.com/turbinelabs/api"
"github.com/turbinelabs/cli/command"
tbnflag "github.com/turbinelabs/nonstdlib/flag"
"github.com/turbinelabs/nonstdlib/flag/usage"
"github.com/turbinelabs/nonstdlib/log/console"
tbnstrings "github.com/turbinelabs/nonstdlib/strings"
"github.com/turbinelabs/rotor"
"github.com/turbinelabs/rotor/updater"
)
const (
defaultClusterTagNamespace = "tbn:cluster"
delimiter = ":"
awsDescription = `Connects to the AWS API in a given region and
updates Clusters stored in the Turbine Labs API at startup and periodically
thereafter.
EC2 instance tags are used to determine to which clusters an instance belongs.
An EC2 instance may belong to multiple clusters, serving traffic on multiple
ports. Cluster membership on a port is declared with a tag, of the form:
"<namespace>:<cluster-name>:<port>"=""
The port must be numeric, and the cluster name cannot contain the delimiter.
The delimiter is ":" and the default namespace is "` + defaultClusterTagNamespace + `".
Tags of the following form will be added to the Instance in the appropriate
Cluster, as "<key>"="<value>":
"<namespace>:<cluster-name>:<port>:<key>"="<value>"
If key/value tags are included, the cluster membership tag is optional.
Tags without the namespaced cluster/port prefix will be added to all Instances
in all Clusters to which the EC2 Instance belongs.
By default, all EC2 Instances in the VPC are examined, but additional filters
can be specified (see -filters).`
)
func AWSCmd(updaterFlags rotor.UpdaterFromFlags) *command.Cmd {
runner := &awsRunner{filterStrs: tbnflag.NewStrings()}
cmd := &command.Cmd{
Name: "aws",
Summary: "aws collector",
Usage: "[OPTIONS]",
Description: awsDescription,
Runner: runner,
}
flags := tbnflag.Wrap(&cmd.Flags)
flags.StringVar(
&runner.settings.namespace,
"cluster-tag-namespace",
defaultClusterTagNamespace,
"The namespace for cluster tags",
)
flags.StringVar(
&runner.settings.vpcID,
"vpc-id",
"",
usage.Required("The ID of the VPC in which rotor is running"),
)
flags.Var(
&runner.filterStrs,
"filters",
"A comma-delimited list of key/value pairs, used to specify additional "+
"EC2 Instances filters. Of the form `\"<key>=<value>,...\"`. "+
"See http://goo.gl/kSCOHS for a discussion of available filters.",
)
runner.awsFlags = newClientFromFlags(flags)
runner.updaterFlags = updaterFlags
return cmd
}
type awsCollectorSettings struct {
namespace string
delimiter string
vpcID string
filters map[string][]string
}
type awsRunner struct {
filterStrs tbnflag.Strings
settings awsCollectorSettings
awsFlags clientFromFlags
updaterFlags rotor.UpdaterFromFlags
}
func (r *awsRunner) Run(cmd *command.Cmd, args []string) command.CmdErr {
filters, err := r.processFilters(r.filterStrs.Strings)
if err != nil {
return cmd.BadInput(err)
}
r.settings.filters = filters
r.settings.delimiter = delimiter
if err := r.updaterFlags.Validate(); err != nil {
return cmd.BadInput(err)
}
u, err := r.updaterFlags.Make()
if err != nil {
return cmd.Error(err)
}
c := awsCollector{
settings: r.settings,
ec2Svc: r.awsFlags.MakeEC2Client(),
}
updater.Loop(u, c.getClusters)
return command.NoError()
}
func (r *awsRunner) processFilters(strs []string) (map[string][]string, error) {
filters := map[string][]string{}
for _, str := range strs {
key, value := tbnstrings.SplitFirstEqual(str)
if key == "" || value == "" {
return nil, fmt.Errorf("malformed filter: %q", str)
}
filters[key] = append(filters[key], value)
}
return filters, nil
}
type awsCollector struct {
settings awsCollectorSettings
ec2Svc ec2Interface
}
func (c awsCollector) getClusters() ([]api.Cluster, error) {
params := &ec2.DescribeInstancesInput{Filters: c.mkFilters()}
resp, err := c.ec2Svc.DescribeInstances(params)
if err != nil {
return nil, fmt.Errorf("error executing aws api list: %s", err.Error())
}
return c.reservationsToClusters(resp.Reservations), nil
}
func (c awsCollector) mkFilters() []*ec2.Filter {
filters := []*ec2.Filter{
// return only running instances
{
Name: aws.String("instance-state-name"),
Values: []*string{aws.String("running")},
},
// in the provided VPC
{
Name: aws.String("vpc-id"),
Values: []*string{aws.String(c.settings.vpcID)},
},
}
// add custom filters
for key, values := range c.settings.filters {
valuePtrs := []*string{}
for _, value := range values {
valuePtrs = append(valuePtrs, aws.String(value))
}
filters = append(filters, &ec2.Filter{Name: aws.String(key), Values: valuePtrs})
}
return filters
}
func (c awsCollector) reservationsToClusters(reservs []*ec2.Reservation) api.Clusters {
clustersMap := map[string]*api.Cluster{}
for _, res := range reservs {
for _, inst := range res.Instances {
c.processEC2Instance(clustersMap, inst)
}
}
clusters := make(api.Clusters, 0, len(clustersMap))
for _, cluster := range clustersMap {
sort.Sort(api.InstancesByHostPort(cluster.Instances))
clusters = append(clusters, *cluster)
}
sort.Sort(api.ClusterByName(clusters))
return clusters
}
func (c awsCollector) processEC2Instance(clusters map[string]*api.Cluster, inst *ec2.Instance) {
host := *inst.PrivateIpAddress
tpm := tagAndPortMap{prefix: c.settings.namespace, delimiter: c.settings.delimiter}
// process all tags, extracting cluster-namespaced key/value pairs and ports
for _, tag := range inst.Tags {
// TODO: consider adding other machine metadata as tags
if err := tpm.processTag(*tag.Key, *tag.Value); err != nil {
console.Error().Printf("Skipping tag for Instance %s: %s", host, err)
}
}
for clusterAndPort, md := range tpm.clusterTagMap {
metadata := api.MetadataFromMap(md)
for key, value := range tpm.globalTagMap {
metadata = append(metadata, api.Metadatum{Key: key, Value: value})
}
sort.Sort(api.MetadataByKey(metadata))
instance := api.Instance{
Host: host,
Port: clusterAndPort.port,
Metadata: metadata,
}
clusterName := clusterAndPort.cluster
cluster := clusters[clusterName]
if cluster == nil {
cluster = &api.Cluster{
Name: clusterName,
Instances: []api.Instance{},
}
clusters[clusterName] = cluster
}
cluster.Instances = append(cluster.Instances, instance)
}
}
type clusterAndPort struct {
cluster string
port int
}
func newClusterAndPort(terms []string) (clusterAndPort, error) {
nope := clusterAndPort{}
if len(terms) < 2 {
return nope, errors.New("must have at least cluster and port")
}
port, err := strconv.ParseUint(terms[1], 10, 16)
if err != nil {
return nope, fmt.Errorf("bad port: %s", err)
}
if port == 0 {
return nope, fmt.Errorf("port must be non zero")
}
if terms[0] == "" {
return nope, errors.New("cluster must be non-empty")
}
return clusterAndPort{terms[0], int(port)}, nil
}
// encapsulates extracting tags and ports from prefixed keys and values for a
// single instance.
type tagAndPortMap struct {
prefix string
delimiter string
clusterTagMap map[clusterAndPort]map[string]string
globalTagMap map[string]string
}
func (tpm *tagAndPortMap) processTag(key, value string) error {
if key == "" {
return fmt.Errorf("empty tag key for value: %q", value)
}
prefixWithDelim := tpm.prefix + tpm.delimiter
// if it doesn't have the prefix, it's a global tag
if !strings.HasPrefix(key, prefixWithDelim) {
if tpm.globalTagMap == nil {
tpm.globalTagMap = map[string]string{}
}
tpm.globalTagMap[key] = value
return nil
}
// remove the prefix
suffix := key[len(prefixWithDelim):]
if suffix == "" {
return fmt.Errorf("tag key empty after %q prefix removed: %q=%q", prefixWithDelim, key, value)
}
terms := strings.SplitN(suffix, tpm.delimiter, 3)
if len(terms) < 2 {
return fmt.Errorf("tag key must have at least cluster name and port: %q=%q", key, value)
}
candp, err := newClusterAndPort(terms)
if err != nil {
return fmt.Errorf("malformed cluster/port in tag key: %q: %s", suffix, err)
}
if tpm.clusterTagMap == nil {
tpm.clusterTagMap = map[clusterAndPort]map[string]string{}
}
if tpm.clusterTagMap[candp] == nil {
tpm.clusterTagMap[candp] = map[string]string{}
}
if len(terms) > 2 {
k := terms[2]
if k == "" {
return fmt.Errorf("tag key cluster name and port, but empty key: %q", key)
}
tpm.clusterTagMap[candp][k] = value
}
return nil
} | "sort"
"strconv"
"strings"
|
options.rs | use clap::{AppSettings, Clap};
use std::ffi::OsString;
use std::path::PathBuf;
#[derive(Debug, Clap)]
#[clap(about = "The rusty git", version = clap::crate_version!())]
#[clap(setting = AppSettings::SubcommandRequired)]
#[clap(setting = AppSettings::ColoredHelp)]
pub struct Args {
/// Do not display verbose messages and progress information
#[clap(long, short = 'q')]
pub quiet: bool,
/// Bring up a terminal user interface displaying progress visually
#[clap(long, conflicts_with("quiet"))]
pub progress: bool,
/// The progress TUI will stay up even though the work is already completed.
///
/// Use this to be able to read progress messages or additional information visible in the TUI log pane.
#[clap(long, conflicts_with("quiet"), requires("progress"))]
pub progress_keep_open: bool,
#[clap(subcommand)]
pub cmd: Subcommands,
}
#[derive(Debug, Clap)]
pub enum Subcommands {
/// Initialize the repository in the current directory.
#[clap(visible_alias = "initialize")]
#[clap(setting = AppSettings::ColoredHelp, setting = AppSettings::DisableVersion)]
Init {
/// The directory in which to initialize a new git repository.
///
/// Defaults to the current working directory.
directory: Option<PathBuf>,
},
/// A selection of useful tools
#[clap(setting = AppSettings::ColoredHelp, setting = AppSettings::DisableVersion, setting = AppSettings::SubcommandRequired)]
#[clap(visible_alias = "t")]
Tools(ToolCommands),
#[cfg(debug_assertions)]
Panic,
}
#[derive(Debug, Clap)]
pub enum ToolCommands {
/// Find all repositories in a given directory.
#[clap(setting = AppSettings::ColoredHelp, setting = AppSettings::DisableVersion)]
Find {
/// The directory in which to find all git repositories.
///
/// Defaults to the current working directory.
root: Option<PathBuf>,
},
/// Move all repositories found in a directory into a structure matching their clone URLs.
#[clap(setting = AppSettings::ColoredHelp, setting = AppSettings::DisableVersion)]
Organize {
#[clap(long)]
/// The operation will be in dry-run mode unless this flag is set.
execute: bool,
#[clap(long, short = 'f')]
/// The directory to use when finding input repositories to move into position.
///
/// Defaults to the current working directory.
repository_source: Option<PathBuf>,
#[clap(long, short = 't')]
/// The directory to which to move repositories found in the repository-source.
///
/// Defaults to the current working directory.
destination_directory: Option<PathBuf>,
},
EstimateHours(EstimateHours),
}
#[derive(Debug, Clap)]
#[clap(
about = "Estimate hours worked basted on a commit history",
long_about = "See https://github.com/kimmobrunfeldt/git-hours#how-it-works for details",
version = clap::crate_version!(),
visible_alias = "h")
]
#[clap(setting = clap::AppSettings::ColoredHelp)]
pub struct EstimateHours {
/// The directory containing a '.git/' folder.
#[clap(parse(from_os_str))]
#[clap(validator_os = validator::is_repo)]
#[clap(default_value = ".")]
pub working_dir: PathBuf,
/// The name of the ref like 'main' or 'master' at which to start iterating the commit graph.
#[clap(default_value("main"))]
pub refname: OsString,
/// Show personally identifiable information before the summary. Includes names and email addresses.
#[clap(short = 'p', long)]
pub show_pii: bool,
/// Omit unifying identities by name and email which can lead to the same author appear multiple times
/// due to using different names or email addresses.
#[clap(short = 'i', long)]
pub omit_unify_identities: bool,
}
mod validator {
use anyhow::Context;
use std::{ffi::OsStr, path::PathBuf};
fn is_repo_inner(dir: &OsStr) -> anyhow::Result<()> {
let git_dir = PathBuf::from(dir).join(".git");
let p = git_dir
.canonicalize()
.with_context(|| format!("Could not canonicalize git repository at '{}'", git_dir.display()))?;
if p.extension().unwrap_or_default() == "git"
|| p.file_name().unwrap_or_default() == ".git"
|| p.join("HEAD").is_file()
{
Ok(())
} else {
Err(anyhow::anyhow!(
"Path '{}' needs to be a directory containing '.git/'",
p.display()
))
}
}
pub fn is_repo(dir: &OsStr) -> Result<(), String> |
}
| {
is_repo_inner(dir).map_err(|err| format!("{:#}", err))
} |
ng-zorro-antd-skeleton.js | import { __assign, __spread } from 'tslib';
import { Component, ChangeDetectionStrategy, ViewEncapsulation, ChangeDetectorRef, Renderer2, ElementRef, Input, NgModule } from '@angular/core';
import { toCssPixel } from 'ng-zorro-antd/core';
import { CommonModule } from '@angular/common';
/**
* @fileoverview added by tsickle
* Generated from: nz-skeleton.component.ts
* @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
var NzSkeletonComponent = /** @class */ (function () {
function NzSkeletonComponent(cdr, renderer, elementRef) {
this.cdr = cdr;
this.nzActive = false;
this.nzLoading = true;
this.nzTitle = true;
this.nzAvatar = false;
this.nzParagraph = true;
this.rowsList = [];
this.widthList = [];
renderer.addClass(elementRef.nativeElement, 'ant-skeleton');
}
/**
* @param {?=} value
* @return {?}
*/
NzSkeletonComponent.prototype.toCSSUnit = /**
* @param {?=} value
* @return {?}
*/
function (value) {
if (value === void 0) { value = ''; }
return toCssPixel(value);
};
/**
* @private
* @return {?}
*/
NzSkeletonComponent.prototype.getTitleProps = /**
* @private
* @return {?}
*/
function () {
/** @type {?} */
var hasAvatar = !!this.nzAvatar;
/** @type {?} */
var hasParagraph = !!this.nzParagraph;
/** @type {?} */
var width = '';
if (!hasAvatar && hasParagraph) {
width = '38%';
}
else if (hasAvatar && hasParagraph) {
width = '50%';
}
return __assign({ width: width }, this.getProps(this.nzTitle));
};
/**
* @private
* @return {?}
*/
NzSkeletonComponent.prototype.getAvatarProps = /**
* @private
* @return {?}
*/
function () {
/** @type {?} */
var shape = !!this.nzTitle && !this.nzParagraph ? 'square' : 'circle';
/** @type {?} */
var size = 'large';
return __assign({ shape: shape, size: size }, this.getProps(this.nzAvatar));
};
/**
* @private
* @return {?}
*/
NzSkeletonComponent.prototype.getParagraphProps = /**
* @private
* @return {?}
*/
function () {
/** @type {?} */
var hasAvatar = !!this.nzAvatar;
/** @type {?} */
var hasTitle = !!this.nzTitle;
/** @type {?} */
var basicProps = {};
// Width
if (!hasAvatar || !hasTitle) {
basicProps.width = '61%';
}
// Rows
if (!hasAvatar && hasTitle) {
basicProps.rows = 3;
}
else {
basicProps.rows = 2;
}
return __assign({}, basicProps, this.getProps(this.nzParagraph));
};
/**
* @private
* @template T
* @param {?} prop
* @return {?}
*/
NzSkeletonComponent.prototype.getProps = /**
* @private
* @template T
* @param {?} prop
* @return {?}
*/
function (prop) {
return prop && typeof prop === 'object' ? prop : {};
};
/**
* @private
* @return {?}
*/
NzSkeletonComponent.prototype.getWidthList = /**
* @private
* @return {?}
*/
function () {
var _a = this.paragraph, width = _a.width, rows = _a.rows;
/** @type {?} */
var widthList = [];
if (width && Array.isArray(width)) {
widthList = width;
}
else if (width && !Array.isArray(width)) {
widthList = [];
widthList[(/** @type {?} */ (rows)) - 1] = width;
}
return widthList;
};
/**
* @private
* @return {?}
*/
NzSkeletonComponent.prototype.updateProps = /**
* @private
* @return {?}
*/
function () {
this.title = this.getTitleProps();
this.avatar = this.getAvatarProps();
this.paragraph = this.getParagraphProps();
this.rowsList = __spread(Array(this.paragraph.rows));
this.widthList = this.getWidthList();
this.cdr.markForCheck();
};
/**
* @return {?}
*/
NzSkeletonComponent.prototype.ngOnInit = /**
* @return {?}
*/
function () {
this.updateProps();
};
/**
* @param {?} changes
* @return {?}
*/
NzSkeletonComponent.prototype.ngOnChanges = /**
* @param {?} changes
* @return {?}
*/
function (changes) {
if (changes.nzTitle || changes.nzAvatar || changes.nzParagraph) {
this.updateProps();
}
};
NzSkeletonComponent.decorators = [
{ type: Component, args: [{
changeDetection: ChangeDetectionStrategy.OnPush,
encapsulation: ViewEncapsulation.None,
selector: 'nz-skeleton',
exportAs: 'nzSkeleton',
template: "<ng-container *ngIf=\"nzLoading\">\n <div class=\"ant-skeleton-header\" *ngIf=\"!!nzAvatar\">\n <span\n class=\"ant-skeleton-avatar\"\n [class.ant-skeleton-avatar-lg]=\"avatar.size === 'large'\"\n [class.ant-skeleton-avatar-sm]=\"avatar.size === 'small'\"\n [class.ant-skeleton-avatar-circle]=\"avatar.shape === 'circle'\"\n [class.ant-skeleton-avatar-square]=\"avatar.shape === 'square'\">\n </span>\n </div>\n <div class=\"ant-skeleton-content\">\n <h3 *ngIf=\"!!nzTitle\" class=\"ant-skeleton-title\" [style.width]=\"toCSSUnit(title.width)\"></h3>\n <ul *ngIf=\"!!nzParagraph\" class=\"ant-skeleton-paragraph\">\n <li *ngFor=\"let row of rowsList; let i=index\" [style.width]=\"toCSSUnit(widthList[i])\">\n </li>\n </ul>\n </div>\n</ng-container>\n<ng-container *ngIf=\"!nzLoading\">\n <ng-content></ng-content>\n</ng-container>",
host: {
'[class.ant-skeleton-with-avatar]': '!!nzAvatar',
'[class.ant-skeleton-active]': 'nzActive'
}
}] }
];
/** @nocollapse */
NzSkeletonComponent.ctorParameters = function () { return [
{ type: ChangeDetectorRef },
{ type: Renderer2 },
{ type: ElementRef }
]; };
NzSkeletonComponent.propDecorators = {
nzActive: [{ type: Input }],
nzLoading: [{ type: Input }],
nzTitle: [{ type: Input }],
nzAvatar: [{ type: Input }],
nzParagraph: [{ type: Input }]
};
return NzSkeletonComponent;
}());
if (false) {
/** @type {?} */
NzSkeletonComponent.prototype.nzActive;
/** @type {?} */
NzSkeletonComponent.prototype.nzLoading;
/** @type {?} */
NzSkeletonComponent.prototype.nzTitle;
/** @type {?} */
NzSkeletonComponent.prototype.nzAvatar;
/** @type {?} */
NzSkeletonComponent.prototype.nzParagraph;
/** @type {?} */
NzSkeletonComponent.prototype.title;
/** @type {?} */
NzSkeletonComponent.prototype.avatar;
/** @type {?} */
NzSkeletonComponent.prototype.paragraph;
/** @type {?} */
NzSkeletonComponent.prototype.rowsList;
/** @type {?} */
NzSkeletonComponent.prototype.widthList;
/**
* @type {?}
* @private
*/
NzSkeletonComponent.prototype.cdr;
}
/**
* @fileoverview added by tsickle
* Generated from: nz-skeleton.module.ts
* @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
var NzSkeletonModule = /** @class */ (function () {
function NzSkeletonModule() {
}
NzSkeletonModule.decorators = [
{ type: NgModule, args: [{
declarations: [NzSkeletonComponent],
imports: [CommonModule],
exports: [NzSkeletonComponent]
},] }
];
return NzSkeletonModule;
}());
/**
* @fileoverview added by tsickle
* Generated from: nz-skeleton.type.ts
* @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
* @license
* Copyright Alibaba.com All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://github.com/NG-ZORRO/ng-zorro-antd/blob/master/LICENSE
*/
/**
* @record
*/
function NzSkeletonAvatar() { }
if (false) {
/** @type {?|undefined} */
NzSkeletonAvatar.prototype.size;
/** @type {?|undefined} */
NzSkeletonAvatar.prototype.shape;
}
/**
* @record
*/
function NzSkeletonTitle() { }
if (false) {
/** @type {?|undefined} */
NzSkeletonTitle.prototype.width;
}
/**
* @record
*/
function NzSkeletonParagraph() { }
if (false) {
/** @type {?|undefined} */
NzSkeletonParagraph.prototype.rows;
/** @type {?|undefined} */
NzSkeletonParagraph.prototype.width;
}
| /**
* @fileoverview added by tsickle
* Generated from: public-api.ts
* @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
* @fileoverview added by tsickle
* Generated from: ng-zorro-antd-skeleton.ts
* @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
export { NzSkeletonComponent, NzSkeletonModule };
//# sourceMappingURL=ng-zorro-antd-skeleton.js.map | |
useDomWidth.ts | import {ref, onMounted, onUnmounted} from 'vue'
import {debounce} from 'lodash'
/**
* description: 获取页面宽度
*/
export function useDomWidth( | nst domWidth = ref(window.innerWidth)
function resize() {
domWidth.value = document.body.clientWidth
}
onMounted(() => {
window.addEventListener('resize', debounce(resize, 80))
})
onUnmounted(() => {
window.removeEventListener('resize', resize)
})
return domWidth
}
| ) {
co |
uniq-self-in-mut-slot.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unknown_features)]
#![feature(box_syntax)]
struct | {
a: isize
}
trait Changer {
fn change(self: Box<Self>) -> Box<Self>;
}
impl Changer for X {
fn change(mut self: Box<X>) -> Box<X> {
self.a = 55;
self
}
}
pub fn main() {
let x: Box<_> = box X { a: 32 };
let new_x = x.change();
assert_eq!(new_x.a, 55);
}
| X |
init.js | const Database = require('./config')
const initDb = {
async init(){
const db = await Database()
await db.exec(`CREATE TABLE profile (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT,
avatar TEXT,
monthly_budget INT,
days_per_week INT,
hours_per_day INT,
vacation_per_year INT,
value_hour INT
)`)
await db.exec(`CREATE TABLE jobs (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT,
daily_hours INT,
total_hours INT,
created_at DATETIME
)`)
await db.run(`INSERT INTO profile(
name,
avatar,
monthly_budget,
days_per_week,
hours_per_day,
vacation_per_year,
value_hour
) VALUES (
"Lucas Ezidro",
"https://github.com/lucasezidro.png",
3000,
5,
4,
2,
90
)`)
await db.run(`INSERT INTO jobs(
name,
daily_hours,
total_hours,
created_at
) VALUES (
"Pizzaria gulosso",
2,
1,
1617514376018
)`)
await db.run(`INSERT INTO jobs( | created_at
) VALUES (
"OneTwo Project",
2,
47,
1617514376018
)`)
await db.close()
}}
initDb.init() | name,
daily_hours,
total_hours, |
post.controller.ts | import {
Body,
Controller,
Delete,
Get,
Param,
ParseIntPipe,
Post,
Put,
Query,
UseGuards,
UsePipes,
ValidationPipe,
} from '@nestjs/common';
import { PostService } from './post.service';
import { CreatePostDto } from './dto/create-post.dto';
import { GetUser } from '../auth/decorator/getAuthUser.decorator';
import { User } from '../auth/entity/user.entity';
import { AuthGuard } from '@nestjs/passport';
import { UpdatePostDto } from './dto/update-post.dto';
import { FilterDto } from './dto/filter.dto';
@Controller('post')
@UseGuards(AuthGuard())
export class | {
constructor(private postService: PostService) {}
@Post('/add')
@UsePipes(ValidationPipe)
async addPost(
@GetUser() user: User,
@Body() postDto: CreatePostDto,
): Promise<any> {
return await this.postService.addPost(postDto, user);
}
// Post by Id
@Get('/:id')
async getPostById(
@Param('id', ParseIntPipe) id: number,
@GetUser() user: User,
): Promise<any> {
return await this.postService.getPostById(id, user);
}
// deletePostById
@Delete('/:id')
async deletePostById(
@Param('id', ParseIntPipe) id: number,
@GetUser() user: User,
): Promise<any> {
return await this.postService.deletePostById(id, user);
}
// updatePostById
@Put('/:id')
@UsePipes(ValidationPipe)
async updatePostById(
@Param('id', ParseIntPipe) id: number,
@GetUser() user: User,
@Body() updatePostDto: UpdatePostDto,
): Promise<any> {
return await this.postService.updatePostById(id, user, updatePostDto);
}
// getAllPost
@Get('/post/all')
async getAllPost(
@Query(ValidationPipe) filterDto: FilterDto,
@GetUser() user: User,
): Promise<any> {
return await this.postService.getAllPost(user, filterDto);
}
}
| PostController |
kendo.culture.ibb.min.js | /**
* Kendo UI v2019.3.917 (http://www.telerik.com/kendo-ui)
* Copyright 2019 Progress Software Corporation and/or one of its subsidiaries or affiliates. All rights reserved.
*
* Kendo UI commercial licenses may be obtained at
* http://www.telerik.com/purchase/license-agreement/kendo-ui-complete
* If you do not own a commercial license, this file shall be governed by the trial license terms.
|
*/
!function(y){"function"==typeof define&&define.amd?define(["kendo.core.min"],y):y()}(function(){!function(y,e){kendo.cultures.ibb={name:"ibb",numberFormat:{pattern:["-n"],decimals:2,",":",",".":".",groupSize:[3],percent:{pattern:["-n%","n%"],decimals:2,",":",",".":".",groupSize:[3],symbol:"%"},currency:{name:"",abbr:"",pattern:["-$n","$ n"],decimals:2,",":",",".":".",groupSize:[3],symbol:"₦"}},calendars:{standard:{days:{names:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],namesAbbr:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],namesShort:["Su","Mo","Tu","We","Th","Fr","Sa"]},months:{names:["January","February","March","April","May","June","July","August","September","October","November","December"],namesAbbr:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]},AM:["AM","am","AM"],PM:["PM","pm","PM"],patterns:{d:"d/M/yyyy",D:"dddd, MMMM dd, yyyy",F:"dddd, MMMM dd, yyyy h:mm:ss tt",g:"d/M/yyyy h:mmtt",G:"d/M/yyyy h:mm:ss tt",m:"MMMM dd",M:"MMMM dd",s:"yyyy'-'MM'-'dd'T'HH':'mm':'ss",t:"h:mmtt",T:"h:mm:ss tt",u:"yyyy'-'MM'-'dd HH':'mm':'ss'Z'",y:"yyyy MMMM",Y:"yyyy MMMM"},"/":"/",":":":",firstDay:0}}}}(this)});
//# sourceMappingURL=kendo.culture.ibb.min.js.map | |
brecord.rs | // Copyright (c) 2018 Bart Massey
// [This program is licensed under the "MIT License"]
// Please see the file LICENSE in the source
// distribution of this software for license terms.
//! Read monaural audio output using the PulseAudio blocking
//! interface and write it to stdout as signed 16-bit
//! big-endian samples. On Linux, you can save the output in
//! a file named "test.s16" and play it with the
//! [SoX](http://sox.sourceforge.net) `play` command:
//! `play -B -r 44100 test.s16`.
use byteorder::{BigEndian, WriteBytesExt};
use portaudio as pa;
use std::io::stdout;
/// Sample rate in frames per second.
const SAMPLE_RATE: f32 = 44_100.0;
/// Input time in milliseconds.
const MSECS: usize = 3000;
/// Size of input buffer in frames. Less than 1024 frames
/// is not recommended, as most audio interfaces will choke
/// horribly.
const BUFFER_SIZE: usize = 1024;
/// Total number of frames to be received.
const FRAMES: usize = (SAMPLE_RATE * MSECS as f32 / 1000.0) as usize;
/// Total number of buffers to be received. The audio
/// interface requires whole buffers, so this number
/// may be one low due to truncation.
const BUFFERS: usize = FRAMES / BUFFER_SIZE;
fn main() -> Result<(), pa::Error> {
eprintln!("read audio input and write to stdout");
eprintln!("sample_rate: {}, msecs: {}", SAMPLE_RATE, MSECS);
eprintln!("buffer size: {}, buffers: {}", BUFFER_SIZE, BUFFERS);
eprintln!(
"last buffer nominal size: {}",
BUFFER_SIZE * (BUFFERS + 1) - FRAMES
);
// Set up the stream.
let pa = pa::PortAudio::new()?;
let settings = pa.default_input_stream_settings(
1, // 1 channel
SAMPLE_RATE as f64,
BUFFER_SIZE as u32,
)?;
let mut stream = pa.open_blocking_stream(settings)?;
stream.start()?;
// Get a handle to the output.
let mut stdout = stdout();
// Read all the frames.
let mut read = 0; | while read < FRAMES {
// On overflow, do not panic, but fill with zeros
// and skip ahead to the next buffer. This may not
// be quite right, but is as good as we can do.
match stream.read(BUFFER_SIZE as u32) {
Ok(buffer) => {
assert_eq!(buffer.len(), BUFFER_SIZE);
for &b in buffer {
stdout
.write_i16::<BigEndian>(b)
.expect("bad write of audio buffer");
}
}
Err(pa::Error::InputOverflowed) => {
eprintln!("overflow: read = {}", read);
for &b in [0; BUFFER_SIZE].iter() {
stdout
.write_i16::<BigEndian>(b)
.expect("bad write of zeros");
}
}
status => {
status?;
}
}
// Advance to next buffer.
read += BUFFER_SIZE;
}
// Tear down the stream.
stream.stop()?;
stream.close()?;
Ok(())
} | |
gesture_pan.rs | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use gdk;
use glib::object::Cast;
use glib::object::IsA;
use glib::object::ObjectType as ObjectType_;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use glib::StaticType;
use glib::ToValue;
use glib_sys;
use gtk_sys;
use libc;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
use EventController;
use Gesture;
use GestureDrag;
use GestureSingle;
use Orientation;
use PanDirection;
use PropagationPhase;
use Widget;
glib_wrapper! {
pub struct GesturePan(Object<gtk_sys::GtkGesturePan, gtk_sys::GtkGesturePanClass, GesturePanClass>) @extends GestureDrag, GestureSingle, Gesture, EventController;
match fn {
get_type => || gtk_sys::gtk_gesture_pan_get_type(),
}
}
impl GesturePan {
pub fn new<P: IsA<Widget>>(widget: &P, orientation: Orientation) -> GesturePan {
skip_assert_initialized!();
unsafe {
Gesture::from_glib_full(gtk_sys::gtk_gesture_pan_new(
widget.as_ref().to_glib_none().0,
orientation.to_glib(),
))
.unsafe_cast()
}
}
pub fn get_orientation(&self) -> Orientation |
pub fn set_orientation(&self, orientation: Orientation) {
unsafe {
gtk_sys::gtk_gesture_pan_set_orientation(self.to_glib_none().0, orientation.to_glib());
}
}
pub fn connect_pan<F: Fn(&GesturePan, PanDirection, f64) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn pan_trampoline<F: Fn(&GesturePan, PanDirection, f64) + 'static>(
this: *mut gtk_sys::GtkGesturePan,
direction: gtk_sys::GtkPanDirection,
offset: libc::c_double,
f: glib_sys::gpointer,
) {
let f: &F = &*(f as *const F);
f(&from_glib_borrow(this), from_glib(direction), offset)
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"pan\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
pan_trampoline::<F> as *const (),
)),
Box_::into_raw(f),
)
}
}
pub fn connect_property_orientation_notify<F: Fn(&GesturePan) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_orientation_trampoline<F: Fn(&GesturePan) + 'static>(
this: *mut gtk_sys::GtkGesturePan,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) {
let f: &F = &*(f as *const F);
f(&from_glib_borrow(this))
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::orientation\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_orientation_trampoline::<F> as *const (),
)),
Box_::into_raw(f),
)
}
}
}
#[derive(Clone, Default)]
pub struct GesturePanBuilder {
orientation: Option<Orientation>,
button: Option<u32>,
exclusive: Option<bool>,
touch_only: Option<bool>,
n_points: Option<u32>,
window: Option<gdk::Window>,
propagation_phase: Option<PropagationPhase>,
widget: Option<Widget>,
}
impl GesturePanBuilder {
pub fn new() -> Self {
Self::default()
}
pub fn build(self) -> GesturePan {
let mut properties: Vec<(&str, &dyn ToValue)> = vec![];
if let Some(ref orientation) = self.orientation {
properties.push(("orientation", orientation));
}
if let Some(ref button) = self.button {
properties.push(("button", button));
}
if let Some(ref exclusive) = self.exclusive {
properties.push(("exclusive", exclusive));
}
if let Some(ref touch_only) = self.touch_only {
properties.push(("touch-only", touch_only));
}
if let Some(ref n_points) = self.n_points {
properties.push(("n-points", n_points));
}
if let Some(ref window) = self.window {
properties.push(("window", window));
}
if let Some(ref propagation_phase) = self.propagation_phase {
properties.push(("propagation-phase", propagation_phase));
}
if let Some(ref widget) = self.widget {
properties.push(("widget", widget));
}
let ret = glib::Object::new(GesturePan::static_type(), &properties)
.expect("object new")
.downcast::<GesturePan>()
.expect("downcast");
ret
}
pub fn orientation(mut self, orientation: Orientation) -> Self {
self.orientation = Some(orientation);
self
}
pub fn button(mut self, button: u32) -> Self {
self.button = Some(button);
self
}
pub fn exclusive(mut self, exclusive: bool) -> Self {
self.exclusive = Some(exclusive);
self
}
pub fn touch_only(mut self, touch_only: bool) -> Self {
self.touch_only = Some(touch_only);
self
}
pub fn n_points(mut self, n_points: u32) -> Self {
self.n_points = Some(n_points);
self
}
pub fn window<P: IsA<gdk::Window>>(mut self, window: &P) -> Self {
self.window = Some(window.clone().upcast());
self
}
pub fn propagation_phase(mut self, propagation_phase: PropagationPhase) -> Self {
self.propagation_phase = Some(propagation_phase);
self
}
pub fn widget<P: IsA<Widget>>(mut self, widget: &P) -> Self {
self.widget = Some(widget.clone().upcast());
self
}
}
impl fmt::Display for GesturePan {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "GesturePan")
}
}
| {
unsafe {
from_glib(gtk_sys::gtk_gesture_pan_get_orientation(
self.to_glib_none().0,
))
}
} |
keybindings_test.go | package gui
import (
"testing"
"github.com/danvergara/dblab/pkg/client"
"github.com/danvergara/gocui"
"github.com/stretchr/testify/assert"
)
func TestKeyBindings(t *testing.T) | {
g, _ := New(&gocui.Gui{}, &client.Client{})
err := g.keybindings()
assert.NoError(t, err)
} |
|
chart_test.go | /*
Copyright (c) 2018 Bitnami
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package chart
import (
"bytes"
"crypto/x509"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"path"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/arschles/assert"
appRepov1 "github.com/kubeapps/kubeapps/cmd/apprepository-controller/pkg/apis/apprepository/v1alpha1"
"github.com/kubeapps/kubeapps/pkg/kube"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
chartv2 "k8s.io/helm/pkg/proto/hapi/chart"
"k8s.io/helm/pkg/repo"
)
const testChartArchive = "./testdata/nginx-apiVersion-v1-5.1.1.tgz"
func Test_resolveChartURL(t *testing.T) {
tests := []struct {
name string
baseURL string
chartURL string
wantedURL string
}{
{
"absolute url",
"http://www.google.com",
"http://charts.example.com/repo/wordpress-0.1.0.tgz",
"http://charts.example.com/repo/wordpress-0.1.0.tgz",
},
{
"relative, repo url",
"http://charts.example.com/repo/",
"wordpress-0.1.0.tgz",
"http://charts.example.com/repo/wordpress-0.1.0.tgz",
},
{
"relative, repo index url",
"http://charts.example.com/repo/index.yaml",
"wordpress-0.1.0.tgz",
"http://charts.example.com/repo/wordpress-0.1.0.tgz",
},
{
"relative, repo url - no trailing slash",
"http://charts.example.com/repo",
"wordpress-0.1.0.tgz",
"http://charts.example.com/wordpress-0.1.0.tgz",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
chartURL, err := resolveChartURL(tt.baseURL, tt.chartURL)
assert.NoErr(t, err)
assert.Equal(t, chartURL, tt.wantedURL, "url")
})
}
}
func TestFindChartInRepoIndex(t *testing.T) {
name := "foo"
version := "v1.0.0"
chartURL := "wordpress-0.1.0.tgz"
repoURL := "http://charts.example.com/repo/"
expectedURL := fmt.Sprintf("%s%s", repoURL, chartURL)
chartMeta := chartv2.Metadata{Name: name, Version: version}
chartVersion := repo.ChartVersion{URLs: []string{chartURL}}
chartVersion.Metadata = &chartMeta
chartVersions := []*repo.ChartVersion{&chartVersion}
entries := map[string]repo.ChartVersions{}
entries[name] = chartVersions
index := &repo.IndexFile{APIVersion: "v1", Generated: time.Now(), Entries: entries}
res, err := findChartInRepoIndex(index, repoURL, name, version)
if err != nil {
t.Errorf("Unexpected error %v", err)
}
if res != expectedURL {
t.Errorf("Expecting %s to be resolved as %s", res, expectedURL)
}
}
func TestParseDetails(t *testing.T) {
testCases := []struct {
name string
data string
expected *Details
err bool
}{
{
name: "parses request including app repo resource",
data: `{
"appRepositoryResourceName": "my-chart-repo",
"chartName": "test",
"releaseName": "foo",
"version": "1.0.0",
"values": "foo: bar"
}`,
expected: &Details{
AppRepositoryResourceName: "my-chart-repo",
ChartName: "test",
ReleaseName: "foo",
Version: "1.0.0",
Values: "foo: bar",
},
},
{
name: "errors if appRepositoryResourceName is not present",
data: `{
"chartName": "test",
"releaseName": "foo",
"version": "1.0.0",
"values": "foo: bar"
}`,
err: true,
},
{
name: "errors if appRepositoryResourceName is empty",
data: `{
"appRepositoryResourceName": "",
"chartName": "test",
"releaseName": "foo",
"version": "1.0.0",
"values": "foo: bar"
}`,
err: true,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
ch := ChartClient{}
details, err := ch.ParseDetails([]byte(tc.data))
if tc.err {
if err == nil {
t.Fatalf("expected error")
} else {
return
}
}
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
if !cmp.Equal(tc.expected, details) {
t.Errorf(cmp.Diff(tc.expected, details))
}
})
}
}
// fakeLoadChartV2 implements LoadChartV2 interface.
func fakeLoadChartV2(in io.Reader) (*chartv2.Chart, error) {
return &chartv2.Chart{}, nil
}
func TestparseDetailsForHTTPClient(t *testing.T) {
systemCertPool, err := x509.SystemCertPool()
if err != nil {
t.Fatalf("%+v", err)
}
const (
authHeaderSecretName = "auth-header-secret-name"
authHeaderSecretData = "really-secret-stuff"
customCASecretName = "custom-ca-secret-name"
customCASecretData = "some-cert-data"
appRepoName = "custom-repo"
)
testCases := []struct {
name string
details *Details
appRepoSpec appRepov1.AppRepositorySpec
errorExpected bool
numCertsExpected int
}{
{
name: "default cert pool without auth",
details: &Details{
AppRepositoryResourceName: appRepoName,
},
numCertsExpected: len(systemCertPool.Subjects()),
},
{
name: "custom CA added when passed an AppRepository CRD",
details: &Details{
AppRepositoryResourceName: appRepoName,
},
appRepoSpec: appRepov1.AppRepositorySpec{
Auth: appRepov1.AppRepositoryAuth{
CustomCA: &appRepov1.AppRepositoryCustomCA{
SecretKeyRef: corev1.SecretKeySelector{
corev1.LocalObjectReference{customCASecretName},
"custom-secret-key",
nil,
},
},
},
},
numCertsExpected: len(systemCertPool.Subjects()) + 1,
},
{
name: "errors if secret for custom CA secret cannot be found",
details: &Details{
AppRepositoryResourceName: appRepoName,
},
appRepoSpec: appRepov1.AppRepositorySpec{
Auth: appRepov1.AppRepositoryAuth{
CustomCA: &appRepov1.AppRepositoryCustomCA{
SecretKeyRef: corev1.SecretKeySelector{
corev1.LocalObjectReference{"other-secret-name"},
"custom-secret-key",
nil,
},
},
},
},
errorExpected: true,
},
{
name: "authorization header added when passed an AppRepository CRD",
details: &Details{
AppRepositoryResourceName: appRepoName,
},
appRepoSpec: appRepov1.AppRepositorySpec{
Auth: appRepov1.AppRepositoryAuth{
Header: &appRepov1.AppRepositoryAuthHeader{
SecretKeyRef: corev1.SecretKeySelector{
corev1.LocalObjectReference{authHeaderSecretName},
"custom-secret-key",
nil,
},
},
},
},
numCertsExpected: len(systemCertPool.Subjects()),
},
{
name: "errors if auth secret cannot be found",
details: &Details{
AppRepositoryResourceName: appRepoName,
},
appRepoSpec: appRepov1.AppRepositorySpec{
Auth: appRepov1.AppRepositoryAuth{
CustomCA: &appRepov1.AppRepositoryCustomCA{
SecretKeyRef: corev1.SecretKeySelector{
corev1.LocalObjectReference{"other-secret-name"},
"custom-secret-key",
nil,
},
},
},
},
errorExpected: true,
},
}
for _, tc := range testCases {
// The fake k8s client will contain secret for the CA and header respectively.
secrets := []*corev1.Secret{&corev1.Secret{
ObjectMeta: metav1.ObjectMeta{
Name: customCASecretName,
Namespace: metav1.NamespaceSystem,
},
Data: map[string][]byte{
"custom-secret-key": []byte(customCASecretData),
},
}, &corev1.Secret{
ObjectMeta: metav1.ObjectMeta{
Name: authHeaderSecretName,
Namespace: metav1.NamespaceSystem,
},
Data: map[string][]byte{
"custom-secret-key": []byte(authHeaderSecretData),
},
}}
apprepos := []*appRepov1.AppRepository{&appRepov1.AppRepository{
ObjectMeta: metav1.ObjectMeta{
Name: tc.details.AppRepositoryResourceName,
Namespace: metav1.NamespaceSystem,
},
Spec: tc.appRepoSpec,
}}
chUtils := ChartClient{
appRepoHandler: &kube.FakeHandler{Secrets: secrets, AppRepos: apprepos},
kubeappsNamespace: metav1.NamespaceSystem,
}
t.Run(tc.name, func(t *testing.T) {
appRepo, caCertSecret, authSecret, err := chUtils.parseDetailsForHTTPClient(tc.details)
if err != nil {
if tc.errorExpected {
return
}
t.Fatalf("%+v", err)
} else {
if tc.errorExpected {
t.Fatalf("got: nil, want: error")
}
}
// If the Auth header was set, secrets should be returned
if tc.appRepoSpec.Auth.Header != nil && authSecret == nil {
t.Errorf("Expecting auth secret")
}
if tc.appRepoSpec.Auth.CustomCA != nil && caCertSecret == nil {
t.Errorf("Expecting auth secret")
}
// The client holds a reference to the appRepo.
if got, want := appRepo, apprepos[0]; !cmp.Equal(got, want) {
t.Errorf(cmp.Diff(got, want))
}
})
}
}
// Fake server for repositories and charts
type fakeHTTPClient struct {
repoURL string
chartURLs []string
index *repo.IndexFile
userAgent string
// TODO(absoludity): perhaps switch to use httptest instead of our own fake?
requests []*http.Request
defaultHeaders http.Header
}
// Do for this fake client will return a chart if it exists in the
// index *and* the corresponding chart exists in the testdata directory.
func (f *fakeHTTPClient) Do(h *http.Request) (*http.Response, error) {
// Record the request for later test assertions.
for k, v := range f.defaultHeaders {
// Only add the default header if it's not already set in the request.
if _, ok := h.Header[k]; !ok {
h.Header[k] = v
}
}
f.requests = append(f.requests, h)
if f.userAgent != "" && h.Header.Get("User-Agent") != f.userAgent {
return nil, fmt.Errorf("Wrong user agent: %s", h.Header.Get("User-Agent"))
}
if h.URL.String() == fmt.Sprintf("%sindex.yaml", f.repoURL) {
// Return fake chart index
body, err := json.Marshal(*f.index)
if err != nil {
return nil, err
}
return &http.Response{StatusCode: 200, Body: ioutil.NopCloser(bytes.NewReader(body))}, nil
}
for _, chartURL := range f.chartURLs {
if h.URL.String() == chartURL |
}
// Unexpected path
return &http.Response{StatusCode: 404}, fmt.Errorf("Unexpected path %q for chartURLs %+v", h.URL.String(), f.chartURLs)
}
func newHTTPClient(repoURL string, charts []Details, userAgent string) kube.HTTPClient {
var chartURLs []string
entries := map[string]repo.ChartVersions{}
// Populate Chart registry with content of the given helmReleases
for _, ch := range charts {
chartMeta := chartv2.Metadata{Name: ch.ChartName, Version: ch.Version}
chartURL := fmt.Sprintf("%s%s-%s.tgz", repoURL, ch.ChartName, ch.Version)
chartURLs = append(chartURLs, chartURL)
chartVersion := repo.ChartVersion{Metadata: &chartMeta, URLs: []string{chartURL}}
chartVersions := []*repo.ChartVersion{&chartVersion}
entries[ch.ChartName] = chartVersions
}
index := &repo.IndexFile{APIVersion: "v1", Generated: time.Now(), Entries: entries}
return &fakeHTTPClient{
repoURL: repoURL,
chartURLs: chartURLs,
index: index,
userAgent: userAgent,
defaultHeaders: http.Header{"User-Agent": []string{userAgent}},
}
}
// getFakeClientRequests returns the requests which were issued to the fake test client.
func getFakeClientRequests(t *testing.T, c kube.HTTPClient) []*http.Request {
fakeClient, ok := c.(*fakeHTTPClient)
if !ok {
t.Fatalf("client was not a fakeHTTPClient")
}
return fakeClient.requests
}
func TestGetChart(t *testing.T) {
const repoName = "foo-repo"
testCases := []struct {
name string
chartVersion string
userAgent string
requireV1Support bool
errorExpected bool
}{
{
name: "gets the chart without a user agent",
chartVersion: "5.1.1-apiVersionV1",
userAgent: "",
},
{
name: "gets the chart with a user agent",
chartVersion: "5.1.1-apiVersionV1",
userAgent: "tiller-proxy/devel",
},
{
name: "gets a v2 chart without error when v1 support not required",
chartVersion: "5.1.1-apiVersionV2",
requireV1Support: false,
},
{
name: "returns an error for a v2 chart if v1 support required",
chartVersion: "5.1.1-apiVersionV2",
requireV1Support: true,
errorExpected: true,
},
}
const repoURL = "http://example.com/"
for _, tc := range testCases {
target := Details{
AppRepositoryResourceName: repoName,
ChartName: "nginx",
ReleaseName: "foo",
Version: tc.chartVersion,
}
t.Run(tc.name, func(t *testing.T) {
httpClient := newHTTPClient(repoURL, []Details{target}, tc.userAgent)
chUtils := ChartClient{
userAgent: tc.userAgent,
appRepo: &appRepov1.AppRepository{
ObjectMeta: metav1.ObjectMeta{
Name: repoName,
Namespace: metav1.NamespaceSystem,
},
Spec: appRepov1.AppRepositorySpec{
URL: repoURL,
},
},
}
ch, err := chUtils.GetChart(&target, httpClient, tc.requireV1Support)
if err != nil {
if tc.errorExpected {
if got, want := err.Error(), "apiVersion 'v2' is not valid. The value must be \"v1\""; got != want {
t.Fatalf("got: %q, want: %q", got, want)
} else {
// Continue to the next test.
return
}
}
t.Fatalf("Unexpected error: %v", err)
}
// Currently tests return an nginx chart from ./testdata
// We need to ensure it got loaded in both version formats.
if got, want := ch.Helm2Chart.GetMetadata().GetName(), "nginx"; got != want {
t.Errorf("got: %q, want: %q", got, want)
}
if ch.Helm3Chart == nil {
t.Errorf("got: nil, want: non-nil")
} else if got, want := ch.Helm3Chart.Name(), "nginx"; got != want {
t.Errorf("got: %q, want: %q", got, want)
}
requests := getFakeClientRequests(t, httpClient)
// We expect one request for the index and one for the chart.
if got, want := len(requests), 2; got != want {
t.Fatalf("got: %d, want %d", got, want)
}
for i, url := range []string{
chUtils.appRepo.Spec.URL + "index.yaml",
fmt.Sprintf("%s%s-%s.tgz", chUtils.appRepo.Spec.URL, target.ChartName, target.Version),
} {
if got, want := requests[i].URL.String(), url; got != want {
t.Errorf("got: %q, want: %q", got, want)
}
if got, want := requests[i].Header.Get("User-Agent"), tc.userAgent; got != want {
t.Errorf("got: %q, want: %q", got, want)
}
}
})
}
}
func TestGetIndexFromCache(t *testing.T) {
repoURL := "https://test.com"
data := []byte("foo")
index, sha := getIndexFromCache(repoURL, data)
if index != nil {
t.Error("Index should be empty since it's not in the cache yet")
}
fakeIndex := &repo.IndexFile{}
storeIndexInCache(repoURL, fakeIndex, sha)
index, _ = getIndexFromCache(repoURL, data)
if index != fakeIndex {
t.Error("It should return the stored index")
}
}
func TestClientWithDefaultHeaders(t *testing.T) {
testCases := []struct {
name string
requestHeaders http.Header
defaultHeaders http.Header
expectedHeaders http.Header
}{
{
name: "no headers added when none set",
defaultHeaders: http.Header{},
expectedHeaders: http.Header{},
},
{
name: "existing headers in the request remain present",
requestHeaders: http.Header{"Some-Other": []string{"value"}},
defaultHeaders: http.Header{},
expectedHeaders: http.Header{"Some-Other": []string{"value"}},
},
{
name: "headers are set when present",
defaultHeaders: http.Header{
"User-Agent": []string{"foo/devel"},
"Authorization": []string{"some-token"},
},
expectedHeaders: http.Header{
"User-Agent": []string{"foo/devel"},
"Authorization": []string{"some-token"},
},
},
{
name: "headers can have multiple values",
defaultHeaders: http.Header{
"Authorization": []string{"some-token", "some-other-token"},
},
expectedHeaders: http.Header{
"Authorization": []string{"some-token", "some-other-token"},
},
},
{
name: "default headers do not overwrite request headers",
requestHeaders: http.Header{
"Authorization": []string{"request-auth-token"},
"Other-Request-Header": []string{"other-request-header"},
},
defaultHeaders: http.Header{
"Authorization": []string{"default-auth-token"},
"Other-Default-Header": []string{"other-default-header"},
},
expectedHeaders: http.Header{
"Authorization": []string{"request-auth-token"},
"Other-Request-Header": []string{"other-request-header"},
"Other-Default-Header": []string{"other-default-header"},
},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
client := &fakeHTTPClient{
defaultHeaders: tc.defaultHeaders,
}
request, err := http.NewRequest("GET", "http://example.com/foo", nil)
if err != nil {
t.Fatalf("%+v", err)
}
for k, v := range tc.requestHeaders {
request.Header[k] = v
}
client.Do(request)
requestsWithHeaders := getFakeClientRequests(t, client)
if got, want := len(requestsWithHeaders), 1; got != want {
t.Fatalf("got: %d, want: %d", got, want)
}
requestWithHeader := requestsWithHeaders[0]
if got, want := requestWithHeader.Header, tc.expectedHeaders; !cmp.Equal(got, want) {
t.Errorf(cmp.Diff(want, got))
}
})
}
}
| {
// Fake chart response
testChartPath := path.Join(".", "testdata", h.URL.Path)
f, err := os.Open(testChartPath)
if err != nil {
return &http.Response{StatusCode: 404}, fmt.Errorf("unable to open test chart archive: %q", testChartPath)
}
return &http.Response{StatusCode: 200, Body: f}, nil
} |
server.py | #!/usr/bin/env python
from __future__ import unicode_literals
import configargparse
import sys
from config.config import statusCode,benchmark_types, language_supported, file_location
import config.bleu_results as bleu_results
import tools.sp_enc_dec as sp
import ancillary_functions_anuvaad.ancillary_functions as ancillary_functions
import ancillary_functions_anuvaad.sc_preface_handler as sc_preface_handler
import ancillary_functions_anuvaad.handle_date_url as date_url_util
from flask import Flask, jsonify, request,send_file,abort,send_from_directory
from flask_cors import CORS
from onmt.translate import TranslationServer, ServerModelError
from itertools import repeat
from onmt.utils.logging import init_logger,logger,entry_exit_log,LOG_TAGS
from onmt.utils.misc import split_corpus
from onmt.translate.translator import build_translator
import os
import onmt.opts as opts
from onmt.utils.parse import ArgumentParser
from config.mongo_model import db,Benchmarks
import datetime
from kafka_utils.document_translator import doc_translator
import threading
import translation_util.translate_util as translate_util
import translation_util.interactive_translate as interactive_translation
from config.kafka_topics import consumer_topics,producer_topics,kafka_topic
STATUS_OK = "ok"
STATUS_ERROR = "error"
mongo_config_dir = "config/mongo_config.py"
IS_RUN_KAFKA = 'IS_RUN_KAFKA'
IS_RUN_KAFKA_DEFAULT_VALUE = False
bootstrap_server_boolean = os.environ.get(IS_RUN_KAFKA, IS_RUN_KAFKA_DEFAULT_VALUE)
def start(config_file,
url_root="/translator",
host="0.0.0.0",
port=3003,
debug=True):
def prefix_route(route_function, prefix='', mask='{0}{1}'):
def newroute(route, *args, **kwargs):
return route_function(mask.format(prefix, route), *args, **kwargs)
return newroute
app = Flask(__name__)
CORS(app)
app.config.from_pyfile(mongo_config_dir)
db.init_app(app)
app.route = prefix_route(app.route, url_root)
translation_server = TranslationServer()
translation_server.start(config_file)
def kafka_function():
logger.info('starting kafka from nmt-server on thread-1')
doc_translator(translation_server,[kafka_topic[0]['consumer'],kafka_topic[1]['consumer'],kafka_topic[2]['consumer']])
if bootstrap_server_boolean:
t1 = threading.Thread(target=kafka_function)
# t1.start()
@app.route('/models', methods=['GET'])
def get_models():
out = {}
try:
out['status'] = statusCode["SUCCESS"]
out['response_body'] = translation_server.list_models()
except:
out['status'] = statusCode["SYSTEM_ERR"]
logger.info("Unexpected error: %s"% sys.exc_info()[0])
return jsonify(out)
@app.route('/clone_model/<int:model_id>', methods=['POST'])
def clone_model(model_id):
out = {}
data = request.get_json(force=True)
timeout = -1
if 'timeout' in data:
timeout = data['timeout']
del data['timeout']
opt = data.get('opt', None)
try:
model_id, load_time = translation_server.clone_model(
model_id, opt, timeout)
except ServerModelError as e:
out['status'] = STATUS_ERROR
out['error'] = str(e)
else:
out['status'] = STATUS_OK
out['model_id'] = model_id
out['load_time'] = load_time
return jsonify(out)
@app.route('/unload_model/<int:model_id>', methods=['GET'])
def unload_model(model_id):
out = {"model_id": model_id}
| out['status'] = STATUS_OK
except Exception as e:
out['status'] = STATUS_ERROR
out['error'] = str(e)
return jsonify(out)
@app.route('/translate-anuvaad', methods=['POST'])
def translate():
inputs = request.get_json(force=True)
if len(inputs)>0:
logger.info("Making translate-anuvaad API call")
logger.info(entry_exit_log(LOG_TAGS["input"],inputs))
out = translate_util.translate_func(inputs, translation_server)
logger.info("out from translate_func-trans_util done{}".format(out))
logger.info(entry_exit_log(LOG_TAGS["output"],out))
return jsonify(out)
else:
logger.info("null inputs in request in translate-anuvaad API")
return jsonify({'status':statusCode["INVALID_API_REQUEST"]})
@app.route('/to_cpu/<int:model_id>', methods=['GET'])
def to_cpu(model_id):
out = {'model_id': model_id}
translation_server.models[model_id].to_cpu()
out['status'] = STATUS_OK
return jsonify(out)
@app.route('/to_gpu/<int:model_id>', methods=['GET'])
def to_gpu(model_id):
out = {'model_id': model_id}
translation_server.models[model_id].to_gpu()
out['status'] = STATUS_OK
return jsonify(out)
app.run(debug=debug, host=host, port=port, use_reloader=False,
threaded=True)
def _get_parser():
parser = configargparse.ArgumentParser(
config_file_parser_class=configargparse.YAMLConfigFileParser,
description="OpenNMT-py REST Server")
parser.add_argument("--ip", type=str, default="0.0.0.0")
parser.add_argument("--port", type=int, default="3003")
parser.add_argument("--url_root", type=str, default="/translator")
parser.add_argument("--debug", "-d", action="store_true")
parser.add_argument("--config", "-c", type=str,
default="./available_models/conf.json")
return parser
if __name__ == '__main__':
parser = _get_parser()
args = parser.parse_args()
start(args.config, url_root=args.url_root, host=args.ip, port=args.port,
debug=args.debug) | try:
translation_server.unload_model(model_id) |
dashboard.go | package permissions
import (
"strings"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/sqlstore/migrator"
)
type DashboardPermissionFilter struct {
OrgRole models.RoleType
Dialect migrator.Dialect
UserId int64
OrgId int64
PermissionLevel models.PermissionType
}
func (d DashboardPermissionFilter) Where() (string, []interface{}) {
if d.OrgRole == models.ROLE_ADMIN {
return "", nil
}
okRoles := []interface{}{d.OrgRole}
if d.OrgRole == models.ROLE_EDITOR {
okRoles = append(okRoles, models.ROLE_VIEWER)
}
falseStr := d.Dialect.BooleanStr(false)
sql := `(
dashboard.id IN (
SELECT distinct DashboardId from (
SELECT d.id AS DashboardId
FROM dashboard AS d
LEFT JOIN dashboard_acl AS da ON
da.dashboard_id = d.id OR
da.dashboard_id = d.folder_id
WHERE
d.org_id = ? AND
da.permission >= ? AND
(
da.user_id = ? OR
da.team_id IN (SELECT team_id from team_member AS tm WHERE tm.user_id = ?) OR
da.role IN (?` + strings.Repeat(",?", len(okRoles)-1) + `)
)
UNION
SELECT d.id AS DashboardId
FROM dashboard AS d
LEFT JOIN dashboard AS folder on folder.id = d.folder_id
LEFT JOIN dashboard_acl AS da ON
( | (folder.id IS NULL AND d.has_acl = ` + falseStr + `)
)
)
WHERE
d.org_id = ? AND
da.permission >= ? AND
(
da.user_id = ? OR
da.role IN (?` + strings.Repeat(",?", len(okRoles)-1) + `)
)
) AS a
)
)
`
params := []interface{}{d.OrgId, d.PermissionLevel, d.UserId, d.UserId}
params = append(params, okRoles...)
params = append(params, d.OrgId, d.PermissionLevel, d.UserId)
params = append(params, okRoles...)
return sql, params
} | -- include default permissions -->
da.org_id = -1 AND (
(folder.id IS NOT NULL AND folder.has_acl = ` + falseStr + `) OR |
service_test.go | package verticacheckd
import (
"reflect"
"testing"
)
func | (t *testing.T) {
data := struct {
address string
name string
args []string
}{
address: "10.0.1.66",
name: "cat",
args: []string{"testdata/sample_output.txt"},
}
cases := []struct {
svc *checkService
}{
{
&checkService{
address: data.address,
cmd: data.name,
cmdArgs: data.args,
},
},
}
for _, c := range cases {
svc := NewService(data.address, data.name, data.args)
if !reflect.DeepEqual(svc, c.svc) {
t.Errorf("expected %v to be %v", svc, c.svc)
}
}
}
func TestService_HostState(t *testing.T) {
command := struct {
name string
args []string
}{
name: "cat",
args: []string{"testdata/sample_output.txt"},
}
cases := []struct {
found bool
svc *checkService
}{
{
true,
&checkService{
address: "10.0.1.66",
cmd: command.name,
cmdArgs: command.args,
},
},
{
true,
&checkService{
address: "172.31.47.139",
cmd: command.name,
cmdArgs: command.args,
},
},
{
false,
&checkService{
address: "172.31.47.100",
cmd: command.name,
cmdArgs: command.args,
},
},
}
for _, c := range cases {
check, err := c.svc.HostState()
if err != nil {
t.Errorf("expected %v to be %v", nil, err)
}
if check != c.found {
t.Errorf("expected %v to be %v", c.found, check)
}
}
}
func TestService_DBHostState(t *testing.T) {
command := struct {
name string
args []string
}{
name: "cat",
args: []string{"testdata/sample_output.txt"},
}
cases := []struct {
db string
found bool
svc *checkService
}{
{
"climatedb",
true,
&checkService{
address: "10.0.1.66",
cmd: command.name,
cmdArgs: command.args,
},
},
{
"climatedb",
true,
&checkService{
address: "172.31.47.139",
cmd: command.name,
cmdArgs: command.args,
},
},
{
"climatedb",
false,
&checkService{
address: "172.31.47.100",
cmd: command.name,
cmdArgs: command.args,
},
},
{
"otherdb",
false,
&checkService{
address: "10.0.1.66",
cmd: command.name,
cmdArgs: command.args,
},
},
}
for _, c := range cases {
check, err := c.svc.DBHostState(c.db)
if err != nil {
t.Errorf("expected %v to be %v", nil, err)
}
if check != c.found {
t.Errorf("expected %v to be %v", c.found, check)
}
}
}
| TestService_NewService |
setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import os
from setuptools import setup
def get_version(package):
"""
Return package version as listed in `__version__` in `init.py`.
"""
init_py = open(os.path.join(package, '__init__.py')).read()
return re.search("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
class UltraMagicString(object):
'''
Taken from
http://stackoverflow.com/questions/1162338/whats-the-right-way-to-use-unicode-metadata-in-setup-py
'''
def __init__(self, value):
self.value = value
def __str__(self):
return self.value
def __unicode__(self):
return self.value.decode('UTF-8')
def __add__(self, other):
return UltraMagicString(self.value + str(other))
def split(self, *args, **kw):
|
long_description = '\n\n'.join((
open('README.rst').read(),
open('CHANGES.rst').read(),
))
setup(
name = 'autofixture',
version = get_version('autofixture'),
url = 'https://github.com/gregmuellegger/django-autofixture',
license = 'BSD',
description = 'Provides tools to auto generate test data.',
long_description = long_description,
author = 'Gregor Müllegger',
author_email = '[email protected]',
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.4',
'Framework :: Django :: 1.5',
'Framework :: Django :: 1.6',
'Framework :: Django :: 1.7',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
packages = [
'autofixture',
'autofixture.management',
'autofixture.management.commands'],
install_requires = ['setuptools'],
test_suite = 'runtests.runtests',
)
| return self.value.split(*args, **kw) |
KClosestPointstoOrigin.py | __source__ = 'https://leetcode.com/problems/k-closest-points-to-origin/'
# Time: O(NLogN ~ N)
# Space: O(N)
#
# Quick Select: K-problem
# Description: Leetcode # 973. K Closest Points to Origin
#
# We have a list of points on the plane. Find the K closest points to the origin (0, 0).
#
# (Here, the distance between two points on a plane is the Euclidean distance.)
#
# You may return the answer in any order.
# The answer is guaranteed to be unique (except for the order that it is in.)
#
# Example 1:
#
# Input: points = [[1,3],[-2,2]], K = 1
# Output: [[-2,2]]
# Explanation:
# The distance between (1, 3) and the origin is sqrt(10).
# The distance between (-2, 2) and the origin is sqrt(8).
# Since sqrt(8) < sqrt(10), (-2, 2) is closer to the origin.
# We only want the closest K = 1 points from the origin, so the answer is just [[-2,2]].
# Example 2:
#
# Input: points = [[3,3],[5,-1],[-2,4]], K = 2
# Output: [[3,3],[-2,4]]
# (The answer [[-2,4],[3,3]] would also be accepted.)
#
#
# Note:
#
# 1 <= K <= points.length <= 10000
# -10000 < points[i][0] < 10000
# -10000 < points[i][1] < 10000
#
import unittest
import random
# 428ms 99.89%
class Solution(object):
def kClosest(self, points, K):
"""
:type points: List[List[int]]
:type K: int
:rtype: List[List[int]]
"""
dist = lambda i: points[i][0]**2 + points[i][1]**2
def work(i, j, K):
|
work(0, len(points) - 1, K)
return points[:K]
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/k-closest-points-to-origin/solution/
# Approach 1: Sort
# Complexity Analysis
# Time Complexity: O(NlogN), where N is the length of points.
# Space Complexity: O(N)
# 71ms 62.57%
class Solution {
public int[][] kClosest(int[][] points, int K) {
Arrays.sort(points, (int[] a, int[] b) -> (a[0] * a[0] + a[1] * a[1]) - (b[0]* b[0] + b[1]* b[1]));
int[][] res = new int[K][];
for (int i = 0; i < K; i++) {
res[i] = points[i];
}
return res;
}
}
# Approach 2: Divide and Conquer
# Complexity Analysis
# Time Complexity: O(N) in average case complexity, where N is the length of points.
# Space Complexity: O(N)
# 11ms 100%
import java.util.concurrent.ThreadLocalRandom;
class Solution {
int[][] points;
public int[][] kClosest(int[][] points, int K) {
this.points = points;
work(0, points.length - 1, K);
return Arrays.copyOfRange(points, 0, K);
}
public void work(int i, int j, int K) {
if (i >= j) return;
int oi = i, oj = j;
int pivot = dist(ThreadLocalRandom.current().nextInt(i, j));
while (i < j) {
while (i < j && dist(i) < pivot) i++;
while (i < j && dist(j) > pivot) j--;
swap(i, j);
}
if (K <= i - oi + 1) {
work(oi, i, K);
} else {
work(i + 1, oj, K - (i - oi + 1));
}
}
public int dist(int i) {
return points[i][0] * points[i][0] + points[i][1] * points[i][1];
}
public void swap(int i, int j) {
int t0 = points[i][0], t1 = points[i][1];
points[i][0] = points[j][0];
points[i][1] = points[j][1];
points[j][0] = t0;
points[j][1] = t1;
}
}
# https://leetcode.com/problems/k-closest-points-to-origin/discuss/220235/Java-Three-solutions-to-this-classical-K-th-problem.
# This is a very classical problem, so-called K-th problem.
# Here I will share some summaries and some classical solutions to this kind of problem.
#
# I. The very naive and simple solution is sorting the all points by their distance to the origin point directly,
# then get the top k closest points. We can use the sort function and the code is very short.
#
# Theoretically, the time complexity is O(NlogN), pratically, the real time it takes on leetcode is 104ms.
#
# The advantages of this solution are short, intuitive and easy to implement.
# The disadvantages of this solution are not very efficient and have to know all of the points previously,
# and it is unable to deal with real-time(online) case, it is an off-line solution.
#
# The short code shows as follows:
# 72ms 61.85%
class Solution {
public int[][] kClosest(int[][] points, int K) {
Arrays.sort(points, (p1, p2) -> p1[0] * p1[0] + p1[1] * p1[1] - p2[0] * p2[0] - p2[1] * p2[1]);
return Arrays.copyOfRange(points, 0, K);
}
}
# II. The second solution is based on the first one. We don't have to sort all points.
# Instead, we can maintain a max-heap with size K. Then for each point, we add it to the heap.
# Once the size of the heap is greater than K,
# we are supposed to extract one from the max heap to ensure the size of the heap is always K.
# Thus, the max heap is always maintain top K smallest elements from the first one to crruent one.
# Once the size of the heap is over its maximum capacity, it will exclude the maximum element in it,
# since it can not be the proper candidate anymore.
#
# Theoretically, the time complexity is O(NlogK), but practically, the real time it takes on leetcode is 134ms.
#
# The advantage of this solution is it can deal with real-time(online) stream data.
# It does not have to know the size of the data previously.
# The disadvantage of this solution is it is not the most efficient solution.
#
# The short code shows as follows:
# 79ms 56.66%
class Solution {
public int[][] kClosest(int[][] points, int K) {
PriorityQueue<int[]> pq
= new PriorityQueue<int[]>((p1, p2) -> p2[0] * p2[0] + p2[1] * p2[1] - p1[0] * p1[0] - p1[1] * p1[1]);
for (int[] p : points) {
pq.offer(p);
if (pq.size() > K) {
pq.poll();
}
}
int[][] res = new int[K][2];
while (K > 0) {
res[--K] = pq.poll();
}
return res;
}
}
# III. The last solution is based on quick sort, we can also call it quick select.
# In the quick sort, we will always choose a pivot to compare with other elements.
# After one iteration, we will get an array that all elements smaller than the pivot are on the left side of the pivot
# and all elements greater than the pivot are on the right side of the pivot
# (assuming we sort the array in ascending order).
# So, inspired from this, each iteration, we choose a pivot and then find the position p the pivot should be.
# Then we compare p with the K, if the p is smaller than the K,
# meaning the all element on the left of the pivot are all proper candidates but it is not adequate,
# we have to do the same thing on right side, and vice versa.
# If the p is exactly equal to the K, meaning that we've found the K-th position.
# Therefore, we just return the first K elements, since they are not greater than the pivot.
#
# Theoretically, the average time complexity is O(N) , but just like quick sort,
# in the worst case, this solution would be degenerated to O(N^2), and practically,
# the real time it takes on leetcode is 15ms.
#
# The advantage of this solution is it is very efficient.
# The disadvantage of this solution are it is neither an online solution nor a stable one.
# And the K elements closest are not sorted in ascending order.
#
# The short code shows as follows:
#
# 8ms 100%
class Solution {
public int[][] kClosest(int[][] points, int K) {
int len = points.length, l = 0, r = len - 1;
while (l <= r) {
int mid = helper(points, l, r);
if (mid == K) break;
if (mid < K) l = mid + 1;
else {
r = mid - 1;
}
}
return Arrays.copyOfRange(points, 0, K);
}
private int helper(int[][] A, int l, int r) {
int[] pivot = A[l];
while (l < r) {
while (l < r && compare(A[r], pivot) >= 0) r--;
A[l] = A[r];
while (l < r && compare(A[l], pivot) <= 0) l++;
A[r] = A[l];
}
A[l] = pivot;
return l;
}
private int compare(int[] p1, int[] p2) {
return p1[0] * p1[0] + p1[1] * p1[1] - p2[0] * p2[0] - p2[1] * p2[1];
}
}
'''
| if i >= j: return
oi, oj = i, j
pivot = dist(random.randint(i, j))
while i < j:
while i < j and dist(i) < pivot: i += 1
while i < j and dist(j) > pivot: j -= 1
points[i], points[j] = points[j], points[i]
if K <= i - oi + 1:
work(oi, i, K)
else:
work(i+1, oj, K - (i - oi + 1)) |
guildMemberAdd.js | const Discord = require("discord.js");
const captchagen = require("captchagen");
module.exports = class {
constructor (client) {
this.client = client;
};
async run (member) {
// Guild
let guild = member.guild;
// Get le data de la guild
const guildData = await this.client.db.findOrCreateGuild(guild);
member.guild.data = guildData;
if(!guildData.plugins.autorole || !Array.isArray(guildData.plugins.autorole)) guildData.plugins.autorole = [];
if(!guildData.autoroles) guildData.autoroles = 0;
// Get le data du member
const memberData = await this.client.db.findOrCreateMember(member, guild);
if(guildData.fortress) {
await member.send({
content: member.guild.translate("moderation/kick:KICK_DM", {
emoji: "door",
username: member.user.tag,
server: guild.name,
moderator: this.client.user.tag,
reason: member.guild.translate("misc:FORTRESS_ENABLED")
})
}).catch(() => {});
member.kick(member.guild.translate("misc:FORTRESS_ENABLED"))
};
// Check l'autorole
if(guildData.plugins.autorole.length !== 0 && !guildData.plugins.captcha.enabled && !member.user.bot) {
guildData.plugins.autorole.forEach(role => {
member.roles.add(role.role);
});
};
// Check le captcha
if(guildData.plugins.captcha.enabled) {
this.memberRoles = new Array();
if(guild.roles.cache.get(guildData.plugins.captcha.role) == "undefined") return;
if(member.user.bot) return;
const embed = new Discord.MessageEmbed()
.setAuthor(member.user.username, member.user.displayAvatarURL({ dynamic:true }))
.setFooter(this.client.cfg.footer)
.setColor(this.client.cfg.color.orange)
.setDescription(member.guild.translate("administration/captcha:WAIT", {
member: member.user.tag,
createDate: this.client.functions.printDate(member.user.createdAt),
time: this.client.functions.printDateFrom(member.user.createdAt),
}));
const failEmbed = new Discord.MessageEmbed()
.setAuthor(member.user.username, member.user.displayAvatarURL({ dynamic:true }))
.setFooter(this.client.cfg.footer)
.setColor(this.client.cfg.color.red)
.setDescription(this.client.emotes["arrow_down"] + " " + member.guild.translate("administration/captcha:FAIL", {
member: member.user.tag
}));
const noEmbed = new Discord.MessageEmbed()
.setAuthor(member.user.username, member.user.displayAvatarURL({ dynamic:true }))
.setFooter(this.client.cfg.footer)
.setColor(this.client.cfg.color.red)
.setDescription(this.client.emotes["arrow_down"] + " " + member.guild.translate("administration/captcha:NO", {
member: member.user.tag
}));
const succesEmbed = new Discord.MessageEmbed()
.setAuthor(member.user.username, member.user.displayAvatarURL({ dynamic:true }))
.setFooter(this.client.cfg.footer)
.setColor(this.client.cfg.color.green)
.setDescription(this.client.emotes["arrow_up"] + " " + member.guild.translate("administration/captcha:SUCCES", {
member: member.user.tag
}));
const opt = {
filter: (m) => m.author.id === member.id,
max: 1,
time: 120000,
errors: [ "time" ]
};
var captcha = captchagen.create();
captcha.text();
captcha.height();
captcha.width();
captcha.generate();
let attachment = new Discord.MessageAttachment(captcha.buffer(), "captcha.png")
const captchaChannel = guild.channels.cache.get(guildData.plugins.captcha.captchaChannel);
const captchaLogChannel = guild.channels.cache.get(guildData.plugins.captcha.logCaptchaChannel);
const captchaRole = guildData.plugins.captcha.role;
member.roles.add(captchaRole).catch(() => {});
let msg = await captchaChannel.send({
content: member.guild.translate("administration/captcha:CAPTCHA", {
mention: "<@" + member.user.id + ">"
}),
files: [attachment]
});
captchaLogChannel.send({
embeds: [embed]
});
await member.roles.cache.forEach(role => {
if(role.id == captchaRole) return;
member.roles.remove(role.id).catch(() => {});
this.memberRoles.push(role.id);
});
let collected = await captchaChannel.awaitMessages(opt).catch(() => {})
if(!collected || !collected.first()) {
msg.delete().catch(() => {});
await member.send({
content: member.guild.translate("moderation/kick:KICK_DM", {
emoji: "door",
username: member.user.tag,
server: guild.name,
moderator: this.client.user.tag,
reason: member.guild.translate("administration/captcha:NO_FILL")
})
}).catch(() => {});
member.kick(member.guild.translate("administration/captcha:NO_FILL"))
return captchaLogChannel.send({
embeds: [noEmbed]
});
}
let confMessage = collected.first().content;
collected.first().delete();
if(confMessage.toLowerCase() === captcha.text()) {
msg.delete().catch(() => {});
member.roles.remove(captchaRole);
await this.memberRoles.forEach(role => {
member.roles.add(role).catch(() => {})
});
this.memberRoles = new Array();
captchaLogChannel.send({
embeds: [succesEmbed]
});
// Autorole system
if(guildData.plugins.autorole.length !== 0) {
guildData.plugins.autorole.forEach(role => {
member.roles.add(role.role);
});
};
// Welcome system
if(guildData.plugins.welcome.enabled) {
let welcomeChannel = this.client.channels.cache.get(guildData.plugins.welcome.channel);
welcomeChannel.send({
content: guildData.plugins.welcome.message
.replace("{user}", member.user)
.replace("{user.nickname}", member.user.username)
.replace("{inviter}", "Unknow")
.replace("{guild.name}", guild.name)
.replace("{guild.members}", guild.memberCount)
});
};
} else {
msg.delete();
await member.send({
content: member.guild.translate("moderation/kick:KICK_DM", {
emoji: "door",
username: member.user.tag,
server: guild.name,
moderator: this.client.user.tag, |
member.kick(member.guild.translate("administration/captcha:BAD_FILL"));
captchaLogChannel.send({
embeds: [failEmbed]
});
};
};
// Check le système de bienvenue
if(guildData.plugins.welcome.enabled && !guildData.plugins.captcha.enabled) {
let channel = this.client.channels.cache.get(guildData.plugins.welcome.channel);
channel.send({
content: guildData.plugins.welcome.message
.replace("{user}", member.user)
.replace("{user.nickname}", member.user.username)
.replace("{inviter}", "Unknow")
.replace("{guild.name}", guild.name)
.replace("{guild.members}", guild.memberCount)
});
};
if(!guildData.plugins.welcomeDM) guildData.plugins.welcomeDM = null;
// Check le système de bienvenue en message privés
if(guildData.plugins.welcomeDM !== null) {
member.send({
content: guildData.plugins.welcomeDM
.replace("{user}", member.user)
.replace("{guild.name}", guild.name)
.replace("{guild.members}", guild.memberCount)
}).catch(() => {});
};
};
}; | reason: member.guild.translate("administration/captcha:BAD_FILL")
})
}).catch(() => {}); |
codemirror.ts | import VueCodemirror from 'vue-codemirror';
import 'codemirror/lib/codemirror.css';
// language
import 'codemirror/mode/javascript/javascript.js';
// theme css
import 'codemirror/theme/neo.css';
// require active-line.js
import 'codemirror/addon/selection/active-line.js';
// styleSelectedText
import 'codemirror/addon/selection/mark-selection.js';
import 'codemirror/addon/search/searchcursor.js';
// hint
import 'codemirror/addon/hint/show-hint.js';
import 'codemirror/addon/hint/show-hint.css';
import 'codemirror/addon/hint/javascript-hint.js';
import 'codemirror/addon/selection/active-line.js';
// highlightSelectionMatches
import 'codemirror/addon/scroll/annotatescrollbar.js';
import 'codemirror/addon/search/matchesonscrollbar.js';
import 'codemirror/addon/search/searchcursor.js';
import 'codemirror/addon/search/match-highlighter.js';
// keyMap
import 'codemirror/mode/clike/clike.js'; | import 'codemirror/addon/dialog/dialog.js';
import 'codemirror/addon/dialog/dialog.css';
import 'codemirror/addon/search/searchcursor.js';
import 'codemirror/addon/search/search.js';
import 'codemirror/keymap/sublime.js';
// foldGutter
import 'codemirror/addon/fold/foldgutter.css';
import 'codemirror/addon/fold/brace-fold.js';
import 'codemirror/addon/fold/comment-fold.js';
import 'codemirror/addon/fold/foldcode.js';
import 'codemirror/addon/fold/foldgutter.js';
import 'codemirror/addon/fold/indent-fold.js';
import 'codemirror/addon/fold/markdown-fold.js';
import 'codemirror/addon/fold/xml-fold.js';
export default function codeMirrorInstall(Vue: any) {
Vue.use(VueCodemirror, {
options: {
tabSize: 2,
styleActiveLine: true,
lineNumbers: true,
styleSelectedText: false,
line: true,
foldGutter: true,
gutters: ['CodeMirror-linenumbers', 'CodeMirror-foldgutter'],
mode: 'text/javascript',
hintOptions: {
completeSingle: false
},
keyMap: 'sublime',
matchBrackets: true,
showCursorWhenSelecting: true,
theme: 'neo',
extraKeys: { Ctrl: 'autocomplete' }
}
});
} | import 'codemirror/addon/edit/matchbrackets.js';
import 'codemirror/addon/comment/comment.js'; |
lib.rs | #![deny(unreachable_patterns)]
use ocelotter_runtime::constant_pool::*;
use ocelotter_runtime::interp_stack::InterpEvalStack;
use ocelotter_runtime::klass_repo::SharedKlassRepo;
use ocelotter_runtime::otklass::OtKlass;
use ocelotter_runtime::otmethod::OtMethod;
use ocelotter_runtime::*;
pub mod opcode;
use opcode::*;
pub fn exec_method(
repo: &mut SharedKlassRepo,
meth: &OtMethod,
lvt: &mut InterpLocalVars,
) -> Option<JvmValue> {
dbg!(meth.clone());
// dbg!(meth.get_flags());
if meth.is_native() {
// Explicit type hint here to document the type of n_f
let n_f: fn(&InterpLocalVars) -> Option<JvmValue> = meth.get_native_code().expect(
&format!("Native code not found {}", meth.get_fq_name_desc()),
);
// FIXME Parameter passing
n_f(lvt)
} else {
exec_bytecode_method(repo, meth.get_klass_name(), &meth.get_code(), lvt)
}
}
pub fn exec_bytecode_method(
repo: &mut SharedKlassRepo,
klass_name: String,
instr: &Vec<u8>,
lvt: &mut InterpLocalVars,
) -> Option<JvmValue> {
let mut current = 0;
let mut eval = InterpEvalStack::of();
loop {
// let my_klass_name = klass_name.clone();
let ins: u8 = *instr
.get(current)
.expect(&format!("Byte {} has no value", current));
current += 1;
// dbg!(ins);
match ins {
Opcode::ACONST_NULL => eval.aconst_null(),
Opcode::ALOAD => {
eval.push(lvt.load(instr[current]));
current += 1;
}
Opcode::ALOAD_0 => eval.push(lvt.load(0)),
Opcode::ALOAD_1 => eval.push(lvt.load(1)),
Opcode::ARETURN => break Some(eval.pop()),
Opcode::ASTORE => {
lvt.store(instr[current], eval.pop());
current += 1;
}
Opcode::ASTORE_0 => lvt.store(0, eval.pop()),
Opcode::ASTORE_1 => lvt.store(1, eval.pop()),
Opcode::BIPUSH => {
eval.iconst(instr[current] as i32);
current += 1;
}
Opcode::DADD => eval.dadd(),
Opcode::DCONST_0 => eval.dconst(0.0),
Opcode::DCONST_1 => eval.dconst(1.0),
Opcode::DLOAD => {
eval.push(lvt.load(instr[current]));
current += 1;
}
Opcode::DLOAD_0 => eval.push(lvt.load(0)),
Opcode::DLOAD_1 => eval.push(lvt.load(1)),
Opcode::DLOAD_2 => eval.push(lvt.load(2)),
Opcode::DLOAD_3 => eval.push(lvt.load(3)),
Opcode::DRETURN => break Some(eval.pop()),
Opcode::DSTORE => {
lvt.store(instr[current], eval.pop());
current += 1;
}
Opcode::DSTORE_0 => lvt.store(0, eval.pop()),
Opcode::DSTORE_1 => lvt.store(1, eval.pop()),
Opcode::DSTORE_2 => lvt.store(2, eval.pop()),
Opcode::DSTORE_3 => lvt.store(3, eval.pop()),
Opcode::DSUB => eval.dsub(),
Opcode::DUP => eval.dup(),
Opcode::DUP_X1 => eval.dupX1(),
Opcode::GETFIELD => {
let cp_lookup = ((instr[current] as u16) << 8) + instr[current + 1] as u16;
current += 2;
let recvp: JvmValue = eval.pop();
let obj_id = match recvp {
JvmValue::ObjRef { val: v } => v,
_ => panic!("Not an object ref at {}", (current - 1)),
};
let heap = HEAP.lock().unwrap();
let obj = heap.get_obj(obj_id).clone();
let getf = repo.lookup_instance_field(&klass_name, cp_lookup);
let ret = obj.get_field_value(getf.get_offset() as usize);
eval.push(ret);
}
Opcode::GETSTATIC => {
let cp_lookup = ((instr[current] as u16) << 8) + instr[current + 1] as u16;
current += 2;
let getf = repo.lookup_static_field(&klass_name, cp_lookup).clone();
let klass = repo.lookup_klass(&getf.get_klass_name()).clone();
let ret = klass.get_static_field_value(&getf);
eval.push(ret.clone());
}
Opcode::GOTO => {
current += ((instr[current] as usize) << 8) + instr[current + 1] as usize
}
Opcode::I2D => eval.i2d(),
Opcode::IADD => eval.iadd(),
Opcode::IALOAD => {
let pos_to_load = match eval.pop() {
JvmValue::Int { val: v } => v,
_ => panic!("Non-int seen on stack during IASTORE at {}", current - 1),
};
let arrayid = match eval.pop() {
JvmValue::ObjRef { val: v } => v,
_ => panic!("Non-objref seen on stack during IASTORE at {}", current - 1),
};
dbg!(arrayid.clone());
let unwrapped_val = match HEAP.lock().unwrap().get_obj(arrayid) {
ocelotter_runtime::object::OtObj::vm_arr_int {
id: _,
mark: _,
klassid: _,
length: _,
elements: elts,
} => elts[pos_to_load as usize],
_ => panic!("Non-int[] seen on stack during IASTORE at {}", current - 1),
};
eval.push(JvmValue::Int { val: unwrapped_val });
}
Opcode::IAND => eval.iand(),
| };
let pos_to_store = match eval.pop() {
JvmValue::Int { val: v } => v,
_ => panic!("Non-int seen on stack during IASTORE at {}", current - 1),
};
let obj_id = match eval.pop() {
JvmValue::ObjRef { val: v } => v,
_ => panic!("Non-objref seen on stack during IASTORE at {}", current - 1),
};
HEAP.lock()
.unwrap()
.iastore(obj_id, pos_to_store, val_to_store);
}
Opcode::ICONST_0 => eval.iconst(0),
Opcode::ICONST_1 => eval.iconst(1),
Opcode::ICONST_2 => eval.iconst(2),
Opcode::ICONST_3 => eval.iconst(3),
Opcode::ICONST_4 => eval.iconst(4),
Opcode::ICONST_5 => eval.iconst(5),
Opcode::ICONST_M1 => eval.iconst(-1),
Opcode::IDIV => eval.idiv(),
Opcode::IF_ICMPEQ => {
let jump_to = (instr[current] as usize) << 8 + instr[current + 1] as usize;
if massage_to_int_and_compare(eval.pop(), eval.pop(), |i: i32, j: i32| -> bool {
i == j
}) {
current += jump_to;
} else {
current += 2;
}
}
Opcode::IF_ICMPGT => {
let jump_to = (instr[current] as usize) << 8 + instr[current + 1] as usize;
if massage_to_int_and_compare(eval.pop(), eval.pop(), |i: i32, j: i32| -> bool {
i > j
}) {
current += jump_to;
} else {
current += 2;
}
}
Opcode::IF_ICMPLT => {
let jump_to = (instr[current] as usize) << 8 + instr[current + 1] as usize;
if massage_to_int_and_compare(eval.pop(), eval.pop(), |i: i32, j: i32| -> bool {
i < j
}) {
current += jump_to;
} else {
current += 2;
}
}
Opcode::IF_ICMPNE => {
let jump_to = (instr[current] as usize) << 8 + instr[current + 1] as usize;
if massage_to_int_and_compare(eval.pop(), eval.pop(), |i: i32, j: i32| -> bool {
i == j
}) {
current += 2;
} else {
current += jump_to;
}
}
// Opcode::IFEQ => {
// let jump_to = (instr[current] as usize) << 8 + instr[current + 1] as usize;
// let i = match eval.pop() {
// }
// if == 0 {
// current += jump_to;
// } else {
// current += 2;
// }
// } ,
// Opcode::IFGE => {
// v = eval.pop();
// jump_to = ((int) instr[current++] << 8) + (int) instr[current++];
// if (v.value >= 0L) {
// current += jump_to - 1; // The -1 is necessary as we've already inc'd current
// }
// } ,
// Opcode::IFGT => {
// v = eval.pop();
// jump_to = ((int) instr[current++] << 8) + (int) instr[current++];
// if (v.value > 0L) {
// current += jump_to - 1; // The -1 is necessary as we've already inc'd current
// }
// },
// Opcode::IFLE => {
// v = eval.pop();
// jump_to = ((int) instr[current++] << 8) + (int) instr[current++];
// if (v.value <= 0L) {
// current += jump_to - 1; // The -1 is necessary as we've already inc'd current
// }
// },
// Opcode::IFLT => {
// v = eval.pop();
// jump_to = ((int) instr[current++] << 8) + (int) instr[current++];
// if (v.value < 0L) {
// current += jump_to - 1; // The -1 is necessary as we've already inc'd current
// }
// },
// Opcode::IFNE => {
// v = eval.pop();
// jump_to = ((int) instr[current] << 8) + (int) instr[current + 1];
// if (v.value != 0L) {
// current += jump_to - 1; // The -1 is necessary as we've already inc'd current
// }
// },
Opcode::IFNONNULL => {
let jump_to = ((instr[current] as usize) << 8) + instr[current + 1] as usize;
match eval.pop() {
JvmValue::ObjRef { val: v } => {
if v > 0 {
current += jump_to;
} else {
current += 2;
}
}
_ => panic!(
"Value not of reference type found for IFNULL at {}",
(current - 1)
),
};
}
Opcode::IFNULL => {
let jump_to = ((instr[current] as usize) << 8) + instr[current + 1] as usize;
match eval.pop() {
JvmValue::ObjRef { val: v } => {
if v == 0 {
current += jump_to;
} else {
current += 2;
}
}
_ => panic!(
"Value not of reference type found for IFNULL at {}",
(current - 1)
),
};
}
Opcode::IINC => {
lvt.iinc(instr[current], instr[current + 1]);
current += 2;
}
Opcode::ILOAD => {
eval.push(lvt.load(instr[current]));
current += 1
}
Opcode::ILOAD_0 => eval.push(lvt.load(0)),
Opcode::ILOAD_1 => eval.push(lvt.load(1)),
Opcode::ILOAD_2 => eval.push(lvt.load(2)),
Opcode::ILOAD_3 => eval.push(lvt.load(3)),
Opcode::IMUL => eval.imul(),
Opcode::INEG => eval.ineg(),
Opcode::INVOKESPECIAL => {
let cp_lookup = ((instr[current] as u16) << 8) + instr[current + 1] as u16;
current += 2;
let current_klass = repo.lookup_klass(&klass_name).clone();
dispatch_invoke(repo, current_klass, cp_lookup, &mut eval, 1);
}
Opcode::INVOKESTATIC => {
let cp_lookup = ((instr[current] as u16) << 8) + instr[current + 1] as u16;
current += 2;
let current_klass = repo.lookup_klass(&klass_name).clone();
// dbg!(current_klass.clone());
dispatch_invoke(repo, current_klass, cp_lookup, &mut eval, 0);
}
Opcode::INVOKEVIRTUAL => {
// FIXME DOES NOT ACTUALLY DO VIRTUAL LOOKUP YET
let cp_lookup = ((instr[current] as u16) << 8) + instr[current + 1] as u16;
current += 2;
let current_klass = repo.lookup_klass(&klass_name).clone();
dbg!(current_klass.clone());
dispatch_invoke(repo, current_klass, cp_lookup, &mut eval, 1);
}
Opcode::IOR => eval.ior(),
Opcode::IREM => eval.irem(),
Opcode::IRETURN => break Some(eval.pop()),
Opcode::ISTORE => {
lvt.store(instr[current], eval.pop());
current += 1;
}
Opcode::ISTORE_0 => lvt.store(0, eval.pop()),
Opcode::ISTORE_1 => lvt.store(1, eval.pop()),
Opcode::ISTORE_2 => lvt.store(2, eval.pop()),
Opcode::ISTORE_3 => lvt.store(3, eval.pop()),
Opcode::ISUB => eval.isub(),
Opcode::L2I => {
match eval.pop() {
JvmValue::Long { val: v } => eval.push(JvmValue::Int { val: v as i32 }),
_ => panic!("Value not of long type found for L2I at {}", (current - 1)),
};
}
Opcode::LDC => {
let cp_lookup = instr[current] as u16;
current += 1;
let current_klass = repo.lookup_klass(&klass_name).clone();
match current_klass.lookup_cp(cp_lookup) {
// FIXME Actually look up the class object properly
CpEntry::class { idx: _ } => eval.aconst_null(),
CpEntry::double { val: dcon } => eval.dconst(dcon),
CpEntry::integer { val: icon } => eval.iconst(icon),
// FIXME Actually look up the class object properly
CpEntry::string { idx: _ } => eval.aconst_null(),
_ => panic!(
"Non-handled entry found in LDC op {} at CP index {}",
current_klass.get_name(),
cp_lookup
),
}
}
// FIXME TEMP
Opcode::MONITORENTER => {
eval.pop();
}
// FIXME TEMP
Opcode::MONITOREXIT => {
eval.pop();
}
Opcode::NEW => {
let cp_lookup = ((instr[current] as u16) << 8) + instr[current + 1] as u16;
current += 2;
let current_klass = repo.lookup_klass(&klass_name).clone();
let alloc_klass_name = match current_klass.lookup_cp(cp_lookup) {
// FIXME Find class name from constant pool of the current class
CpEntry::class { idx } => current_klass.cp_as_string(idx), // "DUMMY_CLASS".to_string(),
_ => panic!(
"Non-class found in {} at CP index {}",
current_klass.get_name(),
cp_lookup
),
};
dbg!(alloc_klass_name.clone());
let object_klass = repo.lookup_klass(&alloc_klass_name).clone();
let obj_id = HEAP.lock().unwrap().allocate_obj(&object_klass);
eval.push(JvmValue::ObjRef { val: obj_id });
}
Opcode::NEWARRAY => {
let arr_type = instr[current];
current += 1;
// FIXME Other primitive array types needed
let arr_id = match arr_type {
// boolean: 4
// char: 5
// float: 6
// double: 7
// byte: 8
// short: 9
// int: 10
// long: 11
10 => match eval.pop() {
JvmValue::Int { val: arr_size } => {
HEAP.lock().unwrap().allocate_int_arr(arr_size)
}
_ => panic!("Not an int on the stack at {}", (current - 1)),
},
_ => panic!("Unsupported primitive array type at {}", (current - 1)),
};
eval.push(JvmValue::ObjRef { val: arr_id });
}
Opcode::NOP => {
();
}
Opcode::POP => {
eval.pop();
}
Opcode::POP2 => {
let _discard: JvmValue = eval.pop();
// FIXME Change to type match
// if (discard.type == JVMType.J || discard.type == JVMType.D) {
// }
eval.pop();
}
Opcode::PUTFIELD => {
let cp_lookup = ((instr[current] as u16) << 8) + instr[current + 1] as u16;
current += 2;
let val = eval.pop();
let recvp: JvmValue = eval.pop();
let obj_id = match recvp {
JvmValue::ObjRef { val: v } => v,
_ => panic!("Not an object ref at {}", (current - 1)),
};
let putf = repo.lookup_instance_field(&klass_name, cp_lookup);
HEAP.lock().unwrap().put_field(obj_id, putf, val);
}
Opcode::PUTSTATIC => {
let cp_lookup = ((instr[current] as u16) << 8) + instr[current + 1] as u16;
current += 2;
let puts = repo.lookup_static_field(&klass_name, cp_lookup);
let klass_name = puts.get_klass_name();
// FIXME IMPL IS BROKEN
repo.put_static(klass_name, puts, eval.pop());
}
Opcode::RETURN => break None,
Opcode::SIPUSH => {
let vtmp = ((instr[current] as i32) << 8) + instr[current + 1] as i32;
eval.iconst(vtmp);
current += 2;
}
Opcode::SWAP => {
let val1 = eval.pop();
let val2 = eval.pop();
eval.push(val1);
eval.push(val2);
}
// Disallowed opcodes
Opcode::BREAKPOINT => break Some(JvmValue::Boolean { val: false }),
Opcode::IMPDEP1 => break Some(JvmValue::Boolean { val: false }),
Opcode::IMPDEP2 => break Some(JvmValue::Boolean { val: false }),
Opcode::JSR => break Some(JvmValue::Boolean { val: false }),
Opcode::JSR_W => break Some(JvmValue::Boolean { val: false }),
Opcode::RET => break Some(JvmValue::Boolean { val: false }),
_ => panic!(
"Illegal opcode byte: {} encountered at position {}. Stopping.",
ins,
(current - 1)
),
}
}
}
fn massage_to_int_and_compare(v1: JvmValue, v2: JvmValue, f: fn(i: i32, j: i32) -> bool) -> bool {
match v1 {
JvmValue::Int { val: i } => match v2 {
JvmValue::Int { val: i1 } => f(i, i1),
_ => panic!("Values found to have differing type for IF_ICMP*"),
},
_ => panic!("Values found to have the wrong type for IF_ICMP*"),
}
}
fn dispatch_invoke(
repo: &mut SharedKlassRepo,
current_klass: OtKlass,
cp_lookup: u16,
eval: &mut InterpEvalStack,
additional_args: u8,
) -> () {
let fq_name_desc = current_klass.cp_as_string(cp_lookup);
let klz_idx = match current_klass.lookup_cp(cp_lookup) {
CpEntry::methodref { clz_idx, nt_idx: _ } => clz_idx,
_ => panic!(
"Non-methodref found in {} at CP index {}",
current_klass.get_name(),
cp_lookup
),
};
let dispatch_klass_name = current_klass.cp_as_string(klz_idx);
let callee = repo.lookup_method_exact(&dispatch_klass_name, fq_name_desc);
// FIXME - General setup requires call args from the stack
let mut vars = InterpLocalVars::of(255);
if additional_args > 0 {
vars.store(0, eval.pop());
}
// Explicit use of match expression to be clear about the semantics
match exec_method(repo, &callee, &mut vars) {
Some(val) => eval.push(val),
None => (),
}
}
// fn parse_class(bytes: Vec<u8>, fname: String) -> OtKlass {
// let mut parser = klass_parser::OtKlassParser::of(bytes, fname);
// parser.parse();
// parser.klass()
// }
#[cfg(test)]
mod tests; | Opcode::IASTORE => {
let val_to_store = match eval.pop() {
JvmValue::Int { val: v } => v,
_ => panic!("Non-int seen on stack during IASTORE at {}", current - 1), |
insight-ethereum-transaction-service.ts | import { TransactionService } from './../../../api/transaction-service';
import { TransactionFilter } from './../../../api/transaction-filter';
import { CryptocurrencyService, ETHEREUM } from './../../index';
import { HttpClient } from '@angular/common/http';
import { Transaction } from './../../../api/transaction';
const ETHERSCAN_API_KEY = "DEI8KRP8S6ZFDWEWMZSI8NPZAX6RMFESDE";
export class InsightEthereumTransactionService implements TransactionService {
constructor(
protected http:HttpClient,
protected serviceUrl:string,
protected cryptocurrency:string = ETHEREUM) {}
findTransactions(filter:TransactionFilter) : Promise<Transaction[]> {
return new Promise((resolve, reject) => {
this.http.get(this.buildUrl(filter))
.subscribe(response => {
resolve(this.parseTransactions(filter.addresses, response, filter.txid));
}, () => { reject(); });
});
}
parseTransaction(transaction:any, addresses:string[]) {
if (transaction) {
let addressIndex = addresses.indexOf(transaction.to);
if (addressIndex >= 0) {
// TODO: same address in vin multiple times?
return {
amount : parseFloat(transaction.value) ,
incomming : true ,
address : addresses[addressIndex]
};
}
}
return null;
}
parseTransactions(addresses:string[], json:any, txHash: string) : Transaction[] {
let output = [];
let items = [];
if (json.result && json.result.length) {
items = json.result;
} else {
items.push(json);
}
if (txHash) {
items = items.filter(item => item.hash == txHash)
}
for (let item of items) {
let tx:any = this.parseTransaction(item, addresses)
if (tx) {
tx.currency = "ETH";
tx._id = item.hash;
tx.confirmations = parseInt(item.confirmations);
tx.timestamp = parseInt(item.timeStamp);
output.push(tx);
}
}
return output;
}
/**
* returns index of addressesAsSearchInput array, which is found inside
* addressesToSearchFor array
*/
addressIndex(addressesToSearchFor:string[], addressesAsSearchInput:string[]) : number {
for (let i = 0; i < addressesAsSearchInput.length; i++) {
if (addressesToSearchFor.indexOf(addressesAsSearchInput[i]) >= 0) {
return i;
}
}
return -1;
}
buildUrl(filter:any = {}) : string {
let url = "";
if (filter.addresses && filter.addresses.length > 0) {
url = this.serviceUrl + "/api";
url += '?module=account&action=txlist&address=' + filter.addresses.join(',') + '&startblock=0&endblock=99999999&sort=asc&apikey=' + ETHERSCAN_API_KEY;
}
// TODO Use filters for perf
//if (filter.from >= 0 && filter.to > 0) {
// url += '?from=' + filter.from + '&to=' + filter.to; | return url;
}
} | //}
|
number_test.go | // Copyright © 2018 Harry Bagdi <[email protected]>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"testing"
)
func TestNumberCmd(t *testing.T) {
tests := []testItem{
{
Name: "basic",
Input: "number",
Output: "377457747\n",
IsErr: false,
},
{
Name: "repeat",
Input: "number -c 2",
Output: "377457747\n532387611\n",
IsErr: false, | {
Name: "unknown flag",
Input: "number --unknown ",
SubString: "Error: unknown flag: --unknown",
IsErr: true,
},
}
runTestTable(t, tests)
} | }, |
orden.schema.ts | import { Schema } from "mongoose";
export const OrdenSchema = new Schema({
idConcurso:{
type:String,
required:true
},
categoria:{
type:String,
required:true
},
ordenProyectos:{
type:Array,
required:true
},
tituloConcurso:{
type:String,
required:true
},
estado:{ | default:"I"
}
}) | type:String, |
leap-seconds.ts | /** julian date, offset (seconds) */
export type LeapSecondsData = [number, number];
/** List of leap seconds by Julian Date. */
export let LEAP_SECONDS: LeapSecondsData[] = [
[2437300.5, 1.422818],
[2437512.5, 1.372818],
[2437665.5, 1.845858],
[2438334.5, 1.845858],
[2438395.5, 3.24013],
[2438486.5, 3.34013],
[2438639.5, 3.44013],
[2438761.5, 3.54013],
[2438820.5, 3.64013],
[2438942.5, 3.74013],
[2439004.5, 3.84013],
[2439126.5, 4.31317],
[2439887.5, 4.21317],
[2441317.5, 10.0],
[2441499.5, 11.0],
[2441683.5, 12.0],
[2442048.5, 13.0],
[2442413.5, 14.0],
[2442778.5, 15.0],
[2443144.5, 16.0],
[2443509.5, 17.0],
[2443874.5, 18.0],
[2444239.5, 19.0], | [2444786.5, 20.0],
[2445151.5, 21.0],
[2445516.5, 22.0],
[2446247.5, 23.0],
[2447161.5, 24.0],
[2447892.5, 25.0],
[2448257.5, 26.0],
[2448804.5, 27.0],
[2449169.5, 28.0],
[2449534.5, 29.0],
[2450083.5, 30.0],
[2450630.5, 31.0],
[2451179.5, 32.0],
[2453736.5, 33.0],
[2454832.5, 34.0],
[2456109.5, 35.0],
[2457204.5, 36.0],
[2457754.5, 37.0]
]; | |
__init__.py |
from .update import Update
from .stager import Stager | __copyright__ = "Copyright 2016, http://radical.rutgers.edu"
__license__ = "MIT" |
|
etender_service.py | # -*- coding: utf-8 -
from iso8601 import parse_date
from datetime import datetime, date, time, timedelta
import dateutil.parser
from pytz import timezone
import os
from decimal import Decimal
import re
TZ = timezone(os.environ['TZ'] if 'TZ' in os.environ else 'Europe/Kiev')
def get_all_etender_dates(initial_tender_data):
tender_period = initial_tender_data.tenderPeriod
start_dt = dateutil.parser.parse(tender_period['startDate'])
end_dt = dateutil.parser.parse(tender_period['endDate'])
data = type('periods', (), { # dynamically creating objects instead of another dict
'tenderStart': type('date', (), {'date': start_dt.strftime("%d-%m-%Y"),
'time': start_dt.strftime("%H:%M")}),
'tenderEnd': type('date', (), {'date': end_dt.strftime("%d-%m-%Y"),
'time': end_dt.strftime("%H:%M")})})
if 'enquiryPeriod' in initial_tender_data:
end_period = dateutil.parser.parse(initial_tender_data.enquiryPeriod['endDate'])
data.enquiryEnd = type('date', (), {'date': end_period.strftime("%d-%m-%Y"),
'time': end_period.strftime("%H:%M")})
return data
def get_procedure_type(methodType):
| діалог 1-ий етап': 'competitiveDialogueUA',
u'конкурентний діалог 2-ий етап': 'competitiveDialogueUA.stage2',
u'звіт про укладений договір': 'reporting',
u'відкриті торги для закупівлі енергосервісу': 'open_esco',
u'відкриті торги для закупівлі енергосервісу': 'esco',
u'конкурентний діалог з публікацією англійською мовою 1-ий етап': 'competitiveDialogueEU',
u'конкурентний діалог з публікацією англійською мовою 2-ий етап': 'competitiveDialogueEU.stage2',
u'відкриті торги для укладання рамкової угоди': 'closeFrameworkAgreementUA',
u'відкриті торгии для укладання рамкової угоди': 'open_framework'
}[procedure_name]
def parse_etender_date(date, as_string=False):
# converts date from ui to datetime
d = datetime.strptime(date, '%d-%m-%Y, %H:%M')
if as_string:
return str(d)
return d
def cut_letters_and_parse_etender_date(date, as_string=True):
# converts date from ui
d = datetime.strptime(date.split(' ')[1], '%d-%m-%Y')
if as_string:
return str(d)
return d
def prepare_locator_to_scroll(locator):
if locator[:3] == 'id=':
return '//*[@id="{}"]'.format(locator[3:])
return locator[6:].replace("'", '"') # 6 for xpath=
def to_iso(date):
return date.isoformat()
def convert_etender_date_to_iso_format(date):
return TZ.localize(parse_etender_date(date)).isoformat()
def convet_fra_to_variable(raw):
b = re.findall(r'P(\d+)Y(\d+)M(\d+)D.*', raw)
c, d, e = b[0]
return c, d, e
def convet_raw_to_chack(raw):
raw = raw.replace(' ', '')
b = re.findall(r'(\d+)р(\d+)м(\d+)д', raw)
c, d, e = b[0]
return c, d, e
def get_year_from_full_date(string):
data_as_str = string.split('T')[0]
data_as_datetime = datetime.strptime(data_as_str, '%Y-%m-%d')
return str(data_as_datetime.year)
def convert_date_to_etender_format(isodate):
iso_dt = parse_date(isodate)
date_string = iso_dt.strftime("%d-%m-%Y")
return date_string
def convert_datetime_for_delivery(isodate):
iso_dt = parse_date(isodate)
date_string = iso_dt.strftime("%Y-%m-%d %H:%M")
return date_string
def convert_time_to_etender_format(isodate):
iso_dt = parse_date(isodate)
time_string = iso_dt.strftime("%H:%M")
return time_string
def float_to_string_2f(value):
return '{:.2f}'.format(value)
def float_to_string_3f(value):
return '{:.3f}'.format(value)
def string_to_float(string):
return float(string)
def change_data(initial_data):
#TODO: remove redundant hardcoded values
# initial_data['data']['procuringEntity']['identifier']['legalName'] = u"TenderOwner#"
# initial_data['data']['procuringEntity']['identifier']['id'] = u"88008800"
# initial_data['data']['procuringEntity']['name'] = u"TenderOwner#"
initial_data['data']['items'][0]['deliveryAddress']['locality'] = u"м. Київ"
initial_data['data']['items'][0]['deliveryAddress']['region'] = u"Київська область"
initial_data['data']['procuringEntity']['address']['locality'] = u"Алупка"
initial_data['data']['procuringEntity']['address']['postalCode'] = u"13531"
initial_data['data']['procuringEntity']['address']['region'] = u"АР Крим"
initial_data['data']['procuringEntity']['address']['streetAddress'] = u"Фрунзе, 666"
initial_data['data']['procuringEntity']['contactPoint']['name'] = u"Владелец Этого Тендера"
initial_data['data']['procuringEntity']['contactPoint']['telephone'] = u"613371488228"
initial_data['data']['procuringEntity']['contactPoint']['url'] = u"http://e-tender.ua/"
return initial_data
def change_data_for_tender_owner(initial_data):
initial_data['data']['procuringEntity']['identifier']['legalName'] = u"TenderOwner#"
initial_data['data']['procuringEntity']['identifier']['id'] = u"88008800"
initial_data['data']['procuringEntity']['name'] = u"TenderOwner#"
return initial_data
def change_buyers_data(initial_data):
initial_data['data']['buyers'][0]['name'] = u"TenderOwner#"
initial_data['data']['buyers'][0]['identifier']['id'] = u"88008800"
initial_data['data']['buyers'][0]['identifier']['legalName'] = u"TenderOwner#"
initial_data['data']['procuringEntity']['name'] = initial_data['data']['buyers'][0]['name']
initial_data['data']['procuringEntity']['identifier']['id'] = initial_data['data']['buyers'][0]['identifier']['id']
initial_data['data']['procuringEntity']['identifier']['legalName'] = \
initial_data['data']['buyers'][0]['identifier']['legalName']
return initial_data
def convert_etender_date_to_iso_format_and_add_timezone(date):
return TZ.localize(parse_etender_date(date)).isoformat()
def get_time_now():
time_string = datetime.now().strftime("%H:%M")
return time_string
def get_date_now():
date_string = datetime.now().strftime("%d-%m-%Y")
return date_string
def get_date_10d_future():
date_string = (datetime.now() + timedelta(days=10)).strftime("%d-%m-%Y")
return date_string
def get_time_offset(add_minutes=17):
_now = datetime.now() + timedelta(minutes=add_minutes)
return _now.time().strftime('%H:%M')
def convert_common_string_to_etender_string(string):
dict = get_helper_dictionary()
for key, val in dict.iteritems():
if val == string:
return key
return string
def parse_currency_value_with_spaces(raw):
# to convert raw values like '2 216 162,83 UAH' to string which is ready for conversion to float
return ''.join(raw.split(' ')[:-1]).replace(',', '.')
def get_minimalStep_currency(raw_value):
# to get currency 'UAH' from raw values like '2 216 162,83 UAH'
result_dic = raw_value.split(' ')
result = result_dic[-1]
return result
def parse_currency_value_with_spaces_percentage(raw):
# to convert raw values like '1,3244%' to string which is ready for conversion to float
result = raw.replace('%', '')
result = Decimal(result)
result = (result / 100)
result = float(result)
return result
def parse_currency_value_with_spaces_percentage_NBU(raw):
# to convert raw values like 'Hi – 1,3244%' to string which is ready for conversion to float
result = raw.split(' ', 4)[4]
result = result.replace('%', '')
result = Decimal(result)
result = (result / 100)
result = float(result)
return result
def convert_etender_string_to_common_string(string):
return get_helper_dictionary().get(string, string)
def get_helper_dictionary():
return {
u"КЛАСИФІКАТОР ДК 021:2015 (CPV)": u"ДК021",
u"кг.": u"кілограм",
u"грн.": u"UAH",
u"(з ПДВ)": True,
u"з ПДВ": True,
u"без ПДВ": False,
# TODO: remove this temporary workaround, consult with quinta team about input data
u"Дніпро": u"Дніпропетровськ",
#tender statuses
u'період уточнень': u'active.enquiries',
u'очікування пропозицій': u'active.tendering',
u'прекваліфікація': u'active.pre-qualification',
u'оцінка пропозицій': u'active.pre-qualification',
u'блокування перед аукціоном': u'active.pre-qualification.stand-still',
u'проведення переговорів': u'active.pre-qualification.stand-still',
u'перший проміжний етап': u'active.stage2.pending',
u'період аукціону': u'active.auction',
u'кваліфікація переможця': u'active.qualification',
u'пропозиції розглянуто': u'active.awarded',
u'завершена закупівля': u'complete',
u'перший етап завершено': u'complete',
u'закупівля не відбулась': u'unsuccessful',
u'відмінена закупівля': u'cancelled',
#bid statuses
u'Пропозиція не дійсна': u'invalid',
u"ст.35 ч. 2 п. 1": u"artContestIP",
u"ст.35 ч. 2 п. 2": u"noCompetition",
u"ст.35 ч. 2 п. 4": u"twiceUnsuccessful",
u"ст.35 ч. 2 п. 5": u"additionalPurchase",
u"ст.35 ч. 2 п. 6": u"additionalConstruction",
u"ст.35 ч. 2 п. 7": u"stateLegalServices",
u"Договір поки що не опубліковано": u"pending",
u"Договір опубліковано": u"active",
u"Переможець торгів": u"active",
u"учасник виграв закупівлю": u"active",
u'вимога': u'claim',
u'відповідь надана': u'answered',
u'задоволено': u'resolved',
u'не задоволено': u'declined',
u'скасована скаржником': u'cancelled',
u'відхилено': u'invalid',
u'залишена без відповіді': u'ignored',
u'очікується кваліфікація': u'pending',
u'відкликається скаржником': u'stopping',
u'очікує розгляду органом оскарження': u'pending',
u'Співфінансування з бюджетних коштів': u'budget',
u'на розгляді': u'pending',
u'Пропозиція не активована': u'invalid'
}
def get_feature_index(i):
return {0.05: '1',
0.01: '2',
0: '3'}[i]
def get_doc_type_index(i):
return {'financial_documents': '1',
'qualification_documents': '2',
'eligibility_documents': '3'}.get(i, i)
def convert_unit_name_to_unit_code(string):
return {
u"блок": u"D64",
u"гектар": u"HAR",
u"кілограми": u"KGM",
u"кілометри": u"KMT",
u"літр": u"LTR",
u"лот": u"LO",
u"метри квадратні": u"MTK",
u"метри кубічні": u"MTQ",
u"метри": u"MTR",
u"місяць": u"MON",
u"набір": u"SET",
u"пара": u"PR",
u"пачка": u"RM",
u"пачок": u"NMP",
u"послуга": u"E48",
u"рейс": u"E54",
u"тони": u"TNE",
u"упаковка": u"PK",
u"Флакон": u"VI",
u"штуки": u"H87",
u"ящик": u"BX",
}.get(string, string)
def convert_milestone_from_text_to_code(string):
return {
u"Аванс": u"prepayment",
u"Пiсляоплата": u"postpayment"
}.get(string, string)
def convert_milestone_from_text_to_title(string):
return {
u"Виконання робіт": "executionOfWorks",
u"Поставка товару": "deliveryOfGoods",
u"Надання послуг": "submittingServices",
u"Підписання договору": "signingTheContract",
u"Дата подання заявки": "submissionDateOfApplications",
u"Дата виставлення рахунку": "dateOfInvoicing",
u"Дата закінчення звітного періоду": "endDateOfTheReportingPeriod",
u"Інша подія": "anotherEvent",
}.get(string, string)
def convert_milestone_from_text_to_day_type(string):
return {
u"Робочі": "working",
u"Банківські": "banking",
u"Календарні": "calendar"
}.get(string, string)
def convert_main_procurement_category(string):
return {
u"Товари": "goods",
u"Послуги": "services",
u"Роботи": "works"
}.get(string, string)
def get_modulus_from_number(number):
if isinstance(number, int):
pass
elif isinstance(number, str):
number = int(number)
elif isinstance(number, unicode):
number = int(number)
return abs(number)
| return {
'aboveThresholdUA': 'Відкриті торги',
'belowThreshold': 'Допорогові закупівлі',
'negotiation': 'Переговорна процедура',
'aboveThresholdEU': 'Відкриті торги з публікацією англійською мовою',
'aboveThresholdUA.defense': 'Переговорна процедура для потреб оборони',
'reporting': 'Звіт про укладений договір',
'competitiveDialogueEU': 'Конкурентний діалог з публікацією англійською мовою 1-ий етап',
'competitiveDialogueUA': 'Конкурентний діалог 1-ий етап',
'open_esco': 'Відкриті торги для закупівлі енергосервісу',
'esco': 'Відкриті торги для закупівлі енергосервісу',
'closeFrameworkAgreementUA': 'Відкриті торги для укладання рамкової угоди',
'open_framework': 'Відкриті торгии для укладання рамкової угоди'
}[methodType].decode('utf-8')
def get_method_type(procedure_name):
return {
u'переговорна процедура для потреб оборони': 'aboveThresholdUA.defense',
u'допорогові закупівлі': 'belowThreshold',
u'відкриті торги з публікацією англійською мовою': 'aboveThresholdEU',
u'переговорна процедура': 'negotiation',
u'відкриті торги': 'aboveThresholdUA',
u'конкурентний |
forms.py | # -*- coding: utf-8
from __future__ import unicode_literals, absolute_import
from django import forms
from jsonsuit.widgets import JSONSuit, ReadonlyJSONSuit |
class TestForm(forms.Form):
stats = forms.CharField(widget=JSONSuit)
class ReadonlyTestForm(forms.Form):
stats = forms.CharField(widget=ReadonlyJSONSuit) | |
utils_test.py | # coding=utf-8
# Copyright 2018 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tf_agents.bandits.agents.utils."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
import tensorflow_probability as tfp
from tf_agents.bandits.agents import utils
from tf_agents.specs import tensor_spec
tfd = tfp.distributions
tf.compat.v1.enable_v2_behavior()
def test_cases():
return parameterized.named_parameters(
{
'testcase_name': '_batch1_contextdim10',
'batch_size': 1,
'context_dim': 10,
}, {
'testcase_name': '_batch4_contextdim5',
'batch_size': 4,
'context_dim': 5,
})
class UtilsTest(tf.test.TestCase, parameterized.TestCase):
def testNumActionsFromTensorSpecGoodSpec(self):
action_spec = tensor_spec.BoundedTensorSpec(
dtype=tf.int32, shape=(), minimum=0, maximum=15)
num_actions = utils.get_num_actions_from_tensor_spec(action_spec)
self.assertEqual(num_actions, 16)
def testNumActionsFromTensorSpecWrongRank(self):
action_spec = tensor_spec.BoundedTensorSpec(
dtype=tf.int32, shape=(2, 3), minimum=0, maximum=15)
with self.assertRaisesRegexp(ValueError, r'Action spec must be a scalar'):
utils.get_num_actions_from_tensor_spec(action_spec)
@test_cases()
def testBUpdate(self, batch_size, context_dim):
b_array = np.array(range(context_dim))
r_array = np.array(range(batch_size)).reshape((batch_size, 1))
x_array = np.array(range(batch_size * context_dim)).reshape(
(batch_size, context_dim))
rx = r_array * x_array
expected_b_updated_array = b_array + np.sum(rx, axis=0)
b = tf.constant(b_array, dtype=tf.float32, shape=[context_dim])
r = tf.constant(r_array, dtype=tf.float32, shape=[batch_size])
x = tf.constant(x_array, dtype=tf.float32, shape=[batch_size, context_dim])
b_update = utils.sum_reward_weighted_observations(r, x)
self.assertAllClose(expected_b_updated_array, self.evaluate(b + b_update))
@test_cases()
def testBUpdateEmptyObservations(self, batch_size, context_dim):
r = tf.constant([], dtype=tf.float32, shape=[0, 1])
x = tf.constant([], dtype=tf.float32, shape=[0, context_dim])
b_update = utils.sum_reward_weighted_observations(r, x)
expected_b_update_array = np.zeros([context_dim], dtype=np.float32)
self.assertAllClose(expected_b_update_array, self.evaluate(b_update))
def testLaplacian1D(self):
action_spec = tensor_spec.BoundedTensorSpec(
dtype=tf.int32, shape=(), minimum=0, maximum=4)
num_actions = utils.get_num_actions_from_tensor_spec(action_spec)
laplacian_matrix = tf.convert_to_tensor(
utils.build_laplacian_over_ordinal_integer_actions(action_spec),
dtype=tf.float32)
res = tf.matmul(
laplacian_matrix, tf.ones([num_actions, 1], dtype=tf.float32))
# The vector of ones is in the null space of the Laplacian matrix.
self.assertAllClose(0.0, self.evaluate(tf.norm(res)))
# The row sum is zero.
row_sum = tf.reduce_sum(laplacian_matrix, 1)
self.assertAllClose(0.0, self.evaluate(tf.norm(row_sum)))
# The column sum is zero.
column_sum = tf.reduce_sum(laplacian_matrix, 0)
self.assertAllClose(0.0, self.evaluate(tf.norm(column_sum)))
# The diagonal elements are 2.0.
self.assertAllClose(2.0, laplacian_matrix[1, 1])
laplacian_matrix_expected = np.array(
[[1.0, -1.0, 0.0, 0.0, 0.0],
[-1.0, 2.0, -1.0, 0.0, 0.0],
[0.0, -1.0, 2.0, -1.0, 0.0],
[0.0, 0.0, -1.0, 2.0, -1.0],
[0.0, 0.0, 0.0, -1.0, 1.0]])
self.assertAllClose(laplacian_matrix_expected,
self.evaluate(laplacian_matrix))
def | (self):
input_vects = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
pdist_matrix = np.array(
[[0.0, 27.0, 108.0,],
[27.0, 0.0, 27.0],
[108.0, 27.0, 0.0]])
tf_dist_matrix = utils.compute_pairwise_distances(
tf.constant(input_vects, dtype=tf.float32))
self.assertAllClose(pdist_matrix, self.evaluate(tf_dist_matrix))
def testBuildLaplacianNearestNeighborGraph(self):
input_vects = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9],
[10, 11, 12], [13, 14, 15]])
num_actions = input_vects.shape[0]
laplacian_matrix = utils.build_laplacian_nearest_neighbor_graph(
tf.constant(input_vects, dtype=tf.float32), k=2)
# The vector of ones is in the null space of the Laplacian matrix.
res = tf.matmul(
laplacian_matrix, tf.ones([num_actions, 1], dtype=tf.float32))
self.assertAllClose(0.0, self.evaluate(tf.norm(res)))
# The row sum is zero.
row_sum = tf.reduce_sum(laplacian_matrix, 1)
self.assertAllClose(0.0, self.evaluate(tf.norm(row_sum)))
# The column sum is zero.
column_sum = tf.reduce_sum(laplacian_matrix, 0)
self.assertAllClose(0.0, self.evaluate(tf.norm(column_sum)))
self.assertAllClose(2.0, laplacian_matrix[0, 0])
self.assertAllClose(4.0, laplacian_matrix[2, 2])
if __name__ == '__main__':
tf.test.main()
| testComputePairwiseDistances |
constants.py | EXCEL_FILE_TYPE = (("Excel 2007 files","*.xlsx"),) |
||
runner_httphandler.go | package testx
import ( |
"github.com/drykit-go/testx/check"
"github.com/drykit-go/testx/checkconv"
"github.com/drykit-go/testx/internal/httpconv"
"github.com/drykit-go/testx/internal/ioutil"
)
var _ HTTPHandlerRunner = (*httpHandlerRunner)(nil)
type httpHandlerRunner struct {
baseRunner
in httpHandlerRunnerInput
got httpHandlerRunnerResults
}
func (r *httpHandlerRunner) WithRequest(request *http.Request) HTTPHandlerRunner {
return &httpHandlerRunner{
baseRunner: r.baseRunner,
in: r.in.withRequest(request),
}
}
func (r *httpHandlerRunner) Duration(checkers ...check.DurationChecker) HTTPHandlerRunner {
for _, c := range checkers {
r.addCheck(baseCheck{
label: "handling duration",
get: func() gottype { return r.got.duration },
checker: checkconv.FromDuration(c),
})
}
return r
}
func (r *httpHandlerRunner) Request(checkers ...check.HTTPRequestChecker) HTTPHandlerRunner {
for _, c := range checkers {
r.addCheck(baseCheck{
label: "http request",
get: func() gottype { return r.got.request },
checker: checkconv.FromHTTPRequest(c),
})
}
return r
}
func (r *httpHandlerRunner) Response(checkers ...check.HTTPResponseChecker) HTTPHandlerRunner {
for _, c := range checkers {
r.addCheck(baseCheck{
label: "http response",
get: func() gottype { return r.got.response },
checker: checkconv.FromHTTPResponse(c),
})
}
return r
}
func (r *httpHandlerRunner) Run(t *testing.T) {
t.Helper()
r.setResults()
r.run(t)
}
func (r *httpHandlerRunner) DryRun() HandlerResulter {
r.setResults()
results := r.got
results.baseResults = r.dryRun()
return results
}
func (r *httpHandlerRunner) setResults() {
rr := httptest.NewRecorder()
if r.in.rq == nil {
r.in.rq = r.defaultRequest()
}
handler := r.in.mw(r.interceptRequest(r.in.hf))
r.got.duration = timeFunc(func() {
handler(rr, r.in.rq)
})
r.got.response = rr.Result() //nolint:bodyclose
r.got.response.Header = rr.Header()
}
func (r *httpHandlerRunner) defaultRequest() *http.Request {
req := httptest.NewRequest("GET", "/", nil)
return req
}
func newHTTPHandlerRunner(
hf http.HandlerFunc,
middlewares ...func(http.HandlerFunc) http.HandlerFunc,
) HTTPHandlerRunner {
runner := &httpHandlerRunner{in: httpHandlerRunnerInput{hf: hf}}
runner.setMergedMiddlewares(middlewares...)
return runner
}
func (r *httpHandlerRunner) interceptRequest(next http.HandlerFunc) http.HandlerFunc {
return func(w http.ResponseWriter, req *http.Request) {
r.got.request = req.Clone(req.Context())
next(w, req)
}
}
func (r *httpHandlerRunner) setMergedMiddlewares(middlewares ...func(http.HandlerFunc) http.HandlerFunc) {
r.in.mw = httpconv.Merge(middlewares...)
}
type httpHandlerRunnerInput struct {
hf http.HandlerFunc
mw func(http.HandlerFunc) http.HandlerFunc
rq *http.Request
}
func (in httpHandlerRunnerInput) withRequest(rq *http.Request) httpHandlerRunnerInput {
return httpHandlerRunnerInput{hf: in.hf, mw: in.mw, rq: rq}
}
type httpHandlerRunnerResults struct {
baseResults
request *http.Request
response *http.Response
duration time.Duration
}
var _ HandlerResulter = (*httpHandlerRunnerResults)(nil)
func (res httpHandlerRunnerResults) ResponseHeader() http.Header {
return res.response.Header
}
func (res httpHandlerRunnerResults) ResponseStatus() string {
return res.response.Status
}
func (res httpHandlerRunnerResults) ResponseCode() int {
return res.response.StatusCode
}
func (res httpHandlerRunnerResults) ResponseBody() []byte {
return ioutil.NopRead(&res.response.Body)
}
func (res httpHandlerRunnerResults) ResponseDuration() time.Duration {
return res.duration
} | "net/http"
"net/http/httptest"
"testing"
"time" |
id_op.py | # SPDX-License-Identifier: Apache-2.0
from ..common._apply_operation import apply_identity
from ..common._registration import register_converter
from ..common._topology import Scope, Operator
from ..common._container import ModelComponentContainer
def | (scope: Scope, operator: Operator,
container: ModelComponentContainer):
apply_identity(
scope, operator.inputs[0].full_name,
operator.outputs[0].full_name, container,
operator_name=scope.get_unique_operator_name('CIdentity'))
register_converter('SklearnIdentity', convert_sklearn_identity)
| convert_sklearn_identity |
Header.js | import React from 'react';
import AppBar from '@material-ui/core/AppBar';
import Toolbar from '@material-ui/core/Toolbar';
import Typography from '@material-ui/core/Typography';
import CssBaseline from '@material-ui/core/CssBaseline';
import { makeStyles } from '@material-ui/core/styles';
const useStyles = makeStyles((theme) => ({
appBar: {
borderBottom: `1px solid ${theme.palette.divider}`,
},
}));
function Header() {
const classes = useStyles();
return (
<React.Fragment>
<CssBaseline />
<AppBar | elevation={0}
className={classes.appBar}
>
<Toolbar>
<Typography variant="h6" color="inherit" noWrap>
StockF
</Typography>
</Toolbar>
</AppBar>
</React.Fragment>
);
}
export default Header; | position="static"
color="white" |
orphan.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Orphan checker: every impl either implements a trait defined in this
//! crate or pertains to a type defined in this crate.
use metadata::cstore::LOCAL_CRATE;
use middle::def_id::DefId;
use middle::traits;
use middle::ty;
use syntax::ast;
use syntax::codemap::Span;
use rustc_front::visit;
use rustc_front::hir;
use rustc_front::hir::{Item, ItemImpl};
pub fn check(tcx: &ty::ctxt) |
struct OrphanChecker<'cx, 'tcx:'cx> {
tcx: &'cx ty::ctxt<'tcx>
}
impl<'cx, 'tcx> OrphanChecker<'cx, 'tcx> {
fn check_def_id(&self, item: &hir::Item, def_id: DefId) {
if def_id.krate != LOCAL_CRATE {
span_err!(self.tcx.sess, item.span, E0116,
"cannot define inherent `impl` for a type outside of the \
crate where the type is defined; define and implement \
a trait or new type instead");
}
}
fn check_primitive_impl(&self,
impl_def_id: DefId,
lang_def_id: Option<DefId>,
lang: &str,
ty: &str,
span: Span) {
match lang_def_id {
Some(lang_def_id) if lang_def_id == impl_def_id => { /* OK */ },
_ => {
span_err!(self.tcx.sess, span, E0390,
"only a single inherent implementation marked with `#[lang = \"{}\"]` \
is allowed for the `{}` primitive", lang, ty);
span_help!(self.tcx.sess, span,
"consider using a trait to implement these methods");
}
}
}
/// Checks exactly one impl for orphan rules and other such
/// restrictions. In this fn, it can happen that multiple errors
/// apply to a specific impl, so just return after reporting one
/// to prevent inundating the user with a bunch of similar error
/// reports.
fn check_item(&self, item: &hir::Item) {
let def_id = self.tcx.map.local_def_id(item.id);
match item.node {
hir::ItemImpl(_, _, _, None, _, _) => {
// For inherent impls, self type must be a nominal type
// defined in this crate.
debug!("coherence2::orphan check: inherent impl {}",
self.tcx.map.node_to_string(item.id));
let self_ty = self.tcx.lookup_item_type(def_id).ty;
match self_ty.sty {
ty::TyEnum(def, _) |
ty::TyStruct(def, _) => {
self.check_def_id(item, def.did);
}
ty::TyTrait(ref data) => {
self.check_def_id(item, data.principal_def_id());
}
ty::TyBox(..) => {
match self.tcx.lang_items.require_owned_box() {
Ok(trait_id) => self.check_def_id(item, trait_id),
Err(msg) => self.tcx.sess.span_fatal(item.span, &msg),
}
}
ty::TyChar => {
self.check_primitive_impl(def_id,
self.tcx.lang_items.char_impl(),
"char",
"char",
item.span);
}
ty::TyStr => {
self.check_primitive_impl(def_id,
self.tcx.lang_items.str_impl(),
"str",
"str",
item.span);
}
ty::TySlice(_) => {
self.check_primitive_impl(def_id,
self.tcx.lang_items.slice_impl(),
"slice",
"[T]",
item.span);
}
ty::TyRawPtr(ty::TypeAndMut { ty: _, mutbl: hir::MutImmutable }) => {
self.check_primitive_impl(def_id,
self.tcx.lang_items.const_ptr_impl(),
"const_ptr",
"*const T",
item.span);
}
ty::TyRawPtr(ty::TypeAndMut { ty: _, mutbl: hir::MutMutable }) => {
self.check_primitive_impl(def_id,
self.tcx.lang_items.mut_ptr_impl(),
"mut_ptr",
"*mut T",
item.span);
}
ty::TyInt(ast::TyI8) => {
self.check_primitive_impl(def_id,
self.tcx.lang_items.i8_impl(),
"i8",
"i8",
item.span);
}
ty::TyInt(ast::TyI16) => {
self.check_primitive_impl(def_id,
self.tcx.lang_items.i16_impl(),
"i16",
"i16",
item.span);
}
ty::TyInt(ast::TyI32) => {
self.check_primitive_impl(def_id,
self.tcx.lang_items.i32_impl(),
"i32",
"i32",
item.span);
}
ty::TyInt(ast::TyI64) => {
self.check_primitive_impl(def_id,
self.tcx.lang_items.i64_impl(),
"i64",
"i64",
item.span);
}
ty::TyInt(ast::TyIs) => {
self.check_primitive_impl(def_id,
self.tcx.lang_items.isize_impl(),
"isize",
"isize",
item.span);
}
ty::TyUint(ast::TyU8) => {
self.check_primitive_impl(def_id,
self.tcx.lang_items.u8_impl(),
"u8",
"u8",
item.span);
}
ty::TyUint(ast::TyU16) => {
self.check_primitive_impl(def_id,
self.tcx.lang_items.u16_impl(),
"u16",
"u16",
item.span);
}
ty::TyUint(ast::TyU32) => {
self.check_primitive_impl(def_id,
self.tcx.lang_items.u32_impl(),
"u32",
"u32",
item.span);
}
ty::TyUint(ast::TyU64) => {
self.check_primitive_impl(def_id,
self.tcx.lang_items.u64_impl(),
"u64",
"u64",
item.span);
}
ty::TyUint(ast::TyUs) => {
self.check_primitive_impl(def_id,
self.tcx.lang_items.usize_impl(),
"usize",
"usize",
item.span);
}
ty::TyFloat(ast::TyF32) => {
self.check_primitive_impl(def_id,
self.tcx.lang_items.f32_impl(),
"f32",
"f32",
item.span);
}
ty::TyFloat(ast::TyF64) => {
self.check_primitive_impl(def_id,
self.tcx.lang_items.f64_impl(),
"f64",
"f64",
item.span);
}
_ => {
span_err!(self.tcx.sess, item.span, E0118,
"no base type found for inherent implementation; \
implement a trait or new type instead");
return;
}
}
}
hir::ItemImpl(_, _, _, Some(_), _, _) => {
// "Trait" impl
debug!("coherence2::orphan check: trait impl {}",
self.tcx.map.node_to_string(item.id));
let trait_ref = self.tcx.impl_trait_ref(def_id).unwrap();
let trait_def_id = trait_ref.def_id;
match traits::orphan_check(self.tcx, def_id) {
Ok(()) => { }
Err(traits::OrphanCheckErr::NoLocalInputType) => {
span_err!(
self.tcx.sess, item.span, E0117,
"the impl does not reference any \
types defined in this crate; \
only traits defined in the current crate can be \
implemented for arbitrary types");
return;
}
Err(traits::OrphanCheckErr::UncoveredTy(param_ty)) => {
span_err!(self.tcx.sess, item.span, E0210,
"type parameter `{}` must be used as the type parameter for \
some local type (e.g. `MyStruct<T>`); only traits defined in \
the current crate can be implemented for a type parameter",
param_ty);
return;
}
}
// In addition to the above rules, we restrict impls of defaulted traits
// so that they can only be implemented on structs/enums. To see why this
// restriction exists, consider the following example (#22978). Imagine
// that crate A defines a defaulted trait `Foo` and a fn that operates
// on pairs of types:
//
// ```
// // Crate A
// trait Foo { }
// impl Foo for .. { }
// fn two_foos<A:Foo,B:Foo>(..) {
// one_foo::<(A,B)>(..)
// }
// fn one_foo<T:Foo>(..) { .. }
// ```
//
// This type-checks fine; in particular the fn
// `two_foos` is able to conclude that `(A,B):Foo`
// because `A:Foo` and `B:Foo`.
//
// Now imagine that crate B comes along and does the following:
//
// ```
// struct A { }
// struct B { }
// impl Foo for A { }
// impl Foo for B { }
// impl !Send for (A, B) { }
// ```
//
// This final impl is legal according to the orpan
// rules, but it invalidates the reasoning from
// `two_foos` above.
debug!("trait_ref={:?} trait_def_id={:?} trait_has_default_impl={}",
trait_ref,
trait_def_id,
self.tcx.trait_has_default_impl(trait_def_id));
if
self.tcx.trait_has_default_impl(trait_def_id) &&
trait_def_id.krate != LOCAL_CRATE
{
let self_ty = trait_ref.self_ty();
let opt_self_def_id = match self_ty.sty {
ty::TyStruct(self_def, _) | ty::TyEnum(self_def, _) =>
Some(self_def.did),
ty::TyBox(..) =>
self.tcx.lang_items.owned_box(),
_ =>
None
};
let msg = match opt_self_def_id {
// We only want to permit structs/enums, but not *all* structs/enums.
// They must be local to the current crate, so that people
// can't do `unsafe impl Send for Rc<SomethingLocal>` or
// `impl !Send for Box<SomethingLocalAndSend>`.
Some(self_def_id) => {
if self_def_id.is_local() {
None
} else {
Some(format!(
"cross-crate traits with a default impl, like `{}`, \
can only be implemented for a struct/enum type \
defined in the current crate",
self.tcx.item_path_str(trait_def_id)))
}
}
_ => {
Some(format!(
"cross-crate traits with a default impl, like `{}`, \
can only be implemented for a struct/enum type, \
not `{}`",
self.tcx.item_path_str(trait_def_id),
self_ty))
}
};
if let Some(msg) = msg {
span_err!(self.tcx.sess, item.span, E0321, "{}", msg);
return;
}
}
// Disallow *all* explicit impls of `Sized` and `Unsize` for now.
if Some(trait_def_id) == self.tcx.lang_items.sized_trait() {
span_err!(self.tcx.sess, item.span, E0322,
"explicit impls for the `Sized` trait are not permitted");
return;
}
if Some(trait_def_id) == self.tcx.lang_items.unsize_trait() {
span_err!(self.tcx.sess, item.span, E0328,
"explicit impls for the `Unsize` trait are not permitted");
return;
}
}
hir::ItemDefaultImpl(..) => {
// "Trait" impl
debug!("coherence2::orphan check: default trait impl {}",
self.tcx.map.node_to_string(item.id));
let trait_ref = self.tcx.impl_trait_ref(def_id).unwrap();
if trait_ref.def_id.krate != LOCAL_CRATE {
span_err!(self.tcx.sess, item.span, E0318,
"cannot create default implementations for traits outside the \
crate they're defined in; define a new trait instead");
return;
}
}
_ => {
// Not an impl
}
}
}
}
impl<'cx, 'tcx,'v> visit::Visitor<'v> for OrphanChecker<'cx, 'tcx> {
fn visit_item(&mut self, item: &hir::Item) {
self.check_item(item);
visit::walk_item(self, item);
}
}
| {
let mut orphan = OrphanChecker { tcx: tcx };
visit::walk_crate(&mut orphan, tcx.map.krate());
} |
get-template.ts | import {
MARKET_TEMPLATES,
MARKET_SUB_TEMPLATES,
MARKET_TYPE_TEMPLATES,
MarketCardTemplate,
TemplateInputType,
CHOICE,
REQUIRED,
} from 'modules/create-market/constants';
import {
CategoryTemplate,
Categories,
Template,
TemplateInput,
} from 'modules/types';
import deepClone from 'utils/deep-clone';
import { Getters } from '@augurproject/sdk';
import { formatDai } from 'utils/format-number';
import { convertUnixToFormattedDate } from 'utils/format-date';
import { NameValuePair } from 'modules/portfolio/types';
import { TEMPLATES } from 'modules/create-market/templates';
export const getTemplateRadioCardsMarketTypes = (categories: Categories) => {
if (!categories || !categories.primary) return MARKET_TYPE_TEMPLATES;
const templates = getTemplatesPerSubcategory(categories, false);
if (!templates) return [];
const marketTypes = templates.reduce((p, t) => [...p, t.marketType], []);
return [...new Set(marketTypes)].map(m =>
MARKET_TYPE_TEMPLATES.find(t => t.value === m)
);
};
export const getTemplatesByTertiaryMarketTypes = (categories: Categories) => {
if (!categories || !categories.primary) return MARKET_TYPE_TEMPLATES;
const templates = getTemplatesPerSubcategory(categories, true);
if (!templates) return [];
const marketTypes = templates.reduce((p, t) => [...p, t.marketType], []);
return [...new Set(marketTypes)].map(m =>
MARKET_TYPE_TEMPLATES.find(t => t.value === m)
);
};
export const getTemplateRadioCards = (
categories: Categories,
categoryStats: Getters.Markets.CategoryStats | null
): MarketCardTemplate[] => {
const cats = getTemplateCategories(categories);
if (cats.length === 0) return [];
if (!categories.primary) {
return cats
.map(c => MARKET_TEMPLATES.find(t => t.value === c))
.map(c => addCategoryStats(categories, c, categoryStats));
}
const useParentValues = hasNoTemplateCategoryChildren(categories.primary);
if (categories.primary && (useParentValues || !categories.secondary)) {
return cats
.map(c =>
MARKET_SUB_TEMPLATES[categories.primary].find(t => t.value === c)
)
.map(c => addCategoryStats(categories, c, categoryStats));
}
if (categories.primary && categories.secondary) {
return cats
.map(c =>
MARKET_SUB_TEMPLATES[categories.primary].find(t => t.value === c)
)
.map(c => addCategoryStats(categories, c, categoryStats));
}
return [];
};
export const addCategoryStats = (
categories: Categories | null,
card: MarketCardTemplate,
categoryStats: Getters.Markets.CategoryStats
): MarketCardTemplate => {
if (!categoryStats) return card;
if (!card) return card;
let stats = null;
const cardValue = card.value.toLowerCase();
if (!categories || !categories.primary) stats = categoryStats[cardValue];
if (categories && categories.primary && !categories.secondary) {
const catStats = categoryStats[categories.primary.toLowerCase()];
stats = catStats && catStats.categories[cardValue];
}
if (categories && categories.primary && categories.secondary) {
let catStats = categoryStats[categories.primary.toLowerCase()];
catStats = catStats[categories.secondary.toLowerCase()];
stats = catStats && catStats.categories[cardValue];
}
if (stats) {
const vol = formatDai(stats.volume || '0').formatted;
const mkrLabel = stats.numberOfMarkets === 1 ? 'Market' : 'Markets';
card.description = `${stats.numberOfMarkets} ${mkrLabel} | $${vol}`;
}
return card;
};
export const getTemplateCategories = (categories: Categories): string[] => {
let emptyCats = [];
if (!categories || !categories.primary) return Object.keys(TEMPLATES).sort();
const primaryCat = TEMPLATES[categories.primary];
if (!primaryCat) return emptyCats;
if (!categories.secondary)
return primaryCat.children ? Object.keys(primaryCat.children).sort() : [];
const secondaryCat = primaryCat.children
? primaryCat.children[categories.secondary]
: emptyCats;
if (!secondaryCat) return emptyCats;
return secondaryCat.children ? Object.keys(secondaryCat.children).sort() : [];
};
export const getTemplateCategoriesByMarketType = (
categories: Categories,
marketType: string
): string[] => {
let emptyCats = [];
if (!categories || !categories.primary) return Object.keys(TEMPLATES);
const primaryCat = TEMPLATES[categories.primary];
if (!primaryCat) return emptyCats;
if (!categories.secondary)
return primaryCat.children ? Object.keys(primaryCat.children) : [];
const secondaryCat = primaryCat.children
? primaryCat.children[categories.secondary]
: emptyCats;
if (!secondaryCat) return emptyCats;
if (secondaryCat.children) {
let children = [];
Object.keys(secondaryCat.children).map(tertiary => {
const marketTypes = getTemplatesByTertiaryMarketTypes({
...categories,
tertiary: tertiary,
});
if (marketTypes.find(type => type.value === marketType)) {
children = children.concat(tertiary);
}
});
return children;
} else {
return [];
}
};
export const getTemplateCategoriesList = (
categories: Categories,
marketType: string
): NameValuePair[] => {
const results = getTemplateCategoriesByMarketType(categories, marketType);
if (!results || results.length === 0) return [];
const mapped = results.map(v => ({ label: v, value: v }));
return mapped as NameValuePair[];
};
export const getTemplatesPerSubcategory = (
categories: Categories,
filterByTertiary: boolean
): Template[] => {
const primary: CategoryTemplate = TEMPLATES[categories.primary];
if (!primary.children) return primary.templates;
const secondary = primary.children[categories.secondary];
if (secondary.children) {
let allSubCategoryTemplates = [];
Object.keys(secondary.children).forEach(key => {
const child = secondary.children[key];
if (
(filterByTertiary && key === categories.tertiary) ||
!filterByTertiary
)
allSubCategoryTemplates = allSubCategoryTemplates.concat(
child.templates
);
});
return allSubCategoryTemplates;
} else {
return secondary.templates;
}
};
export const getTemplates = (
categories: Categories,
marketType: string
): Template[] => {
if (categories.tertiary) {
const primary: CategoryTemplate = TEMPLATES[categories.primary];
const secondary = primary.children[categories.secondary];
const tertiary = secondary.children[categories.tertiary];
return marketType
? getTemplatesByMarketType(tertiary.templates, marketType)
: tertiary.templates;
}
if (categories.secondary) {
const primary: CategoryTemplate = TEMPLATES[categories.primary];
const secondary = primary.children[categories.secondary];
return marketType
? getTemplatesByMarketType(secondary.templates, marketType)
: secondary.templates;
}
if (categories.primary) {
const primary: CategoryTemplate = TEMPLATES[categories.primary];
return marketType
? getTemplatesByMarketType(primary.templates, marketType)
: primary.templates;
}
const categoryTemplates: CategoryTemplate = TEMPLATES[categories.primary];
if (!categoryTemplates) return [];
return marketType
? getTemplatesByMarketType(categoryTemplates.templates, marketType)
: categoryTemplates.templates;
};
const getTemplatesByMarketType = (
categoryTemplates: Template[],
marketType
) => {
const values = categoryTemplates.filter(t => t.marketType === marketType);
return deepClone<Template[]>(values);
};
export const getTemplateReadableDescription = (template: Template) => {
let question = template.question;
const inputs = template.inputs;
for (const i in inputs) {
question = question.replace(
`[${inputs[i].id}]`,
`[${inputs[i].placeholder}]`
);
}
return question;
};
export const buildMarketDescription = (
question: string,
inputs: TemplateInput[]
) => {
inputs.forEach((input: TemplateInput) => {
const userInputFormatted =
input.type === TemplateInputType.DATEYEAR
? convertUnixToFormattedDate(Number(input.userInput))
.formattedSimpleData
: input.userInput;
question = question.replace(
`[${input.id}]`,
`${input.userInput ? userInputFormatted : `[${input.placeholder}]`}`
);
});
return question;
};
export const tellIfEditableOutcomes = (inputs: TemplateInput[]) => {
return (
inputs.filter(
input =>
input.type === TemplateInputType.USER_DESCRIPTION_OUTCOME ||
input.type === TemplateInputType.SUBSTITUTE_USER_OUTCOME || | };
export const createTemplateOutcomes = (inputs: TemplateInput[]) => {
return inputs
.filter(
input =>
input.type === TemplateInputType.SUBSTITUTE_USER_OUTCOME ||
input.type === TemplateInputType.ADDED_OUTCOME ||
input.type === TemplateInputType.USER_DESCRIPTION_OUTCOME ||
input.type === TemplateInputType.USER_DESCRIPTION_DROPDOWN_OUTCOME
)
.map((input: TemplateInput) => {
if (input.type === TemplateInputType.SUBSTITUTE_USER_OUTCOME) {
return substituteUserOutcome(input, inputs);
}
return input.userInput || input.placeholder;
});
};
export const substituteUserOutcome = (
input: TemplateInput,
inputs: TemplateInput[]
) => {
let matches = input.placeholder.match(/\[(.*?)\]/);
let submatch = '0';
if (matches) {
submatch = String(matches[1]);
}
let text = input.placeholder.replace(
`[${submatch}]`,
`${
inputs[submatch].userInput
? inputs[submatch].userInput
: `[${inputs[submatch].placeholder}]`
}`
);
return text;
};
export const buildResolutionDetails = (
userDetails: string,
resolutionRules: ResolutionRules
) => {
let details = userDetails;
Object.keys(resolutionRules).forEach(
type =>
type &&
resolutionRules[type].forEach(rule => {
if ((type === CHOICE && rule.isSelected) || type === REQUIRED) {
if (details.length > 0) {
details = details.concat('\n');
}
details = details.concat(rule.text);
}
})
);
return details;
};
export const hasNoTemplateCategoryChildren = category => {
if (!category) return false;
if (TEMPLATES[category].children) return false;
return true;
};
export const hasNoTemplateCategoryTertiaryChildren = (
category,
subcategory
) => {
if (!category || !subcategory) return false;
if (TEMPLATES[category].children[subcategory].children) return false;
return true;
}; | input.type === TemplateInputType.USER_DESCRIPTION_DROPDOWN_OUTCOME
).length > 0
); |
device_history_test.go | // Copyright 2015 Keybase, Inc. All rights reserved. Use of
// this source code is governed by the included BSD license.
package engine
import (
"testing"
"github.com/keybase/client/go/libkb"
keybase1 "github.com/keybase/client/go/protocol/keybase1"
)
func TestDeviceHistoryBasic(t *testing.T) {
tc := SetupEngineTest(t, "devhist")
defer tc.Cleanup()
CreateAndSignupFakeUserPaper(tc, "dhst")
ctx := &Context{}
eng := NewDeviceHistorySelf(tc.G)
if err := RunEngine(eng, ctx); err != nil {
t.Fatal(err)
}
devs := eng.Devices()
if len(devs) != 2 {
t.Errorf("num devices: %d, expected 2", len(devs))
}
var desktop keybase1.DeviceDetail
var paper keybase1.DeviceDetail
for _, d := range devs {
switch d.Device.Type {
case libkb.DeviceTypePaper:
paper = d
case libkb.DeviceTypeDesktop:
desktop = d
default:
t.Fatalf("unexpected device type %s", d.Device.Type)
}
}
// paper's provisioner should be desktop
if paper.Provisioner == nil {
t.Fatal("paper device has no provisioner")
}
if paper.Provisioner.DeviceID != desktop.Device.DeviceID {
t.Errorf("paper provisioned id: %s, expected %s", paper.Provisioner.DeviceID, desktop.Device.DeviceID)
t.Logf("desktop: %+v", desktop)
t.Logf("paper: %+v", paper)
}
// Check that LastUsedTime is set (since we're fetching our own device history)
for _, d := range devs {
if d.Device.LastUsedTime == 0 {
t.Fatal("last used time not set")
}
}
}
func | (t *testing.T) {
tc := SetupEngineTest(t, "devhist")
defer tc.Cleanup()
u := CreateAndSignupFakeUserPaper(tc, "dhst")
ctx := &Context{}
eng := NewDeviceHistorySelf(tc.G)
if err := RunEngine(eng, ctx); err != nil {
t.Fatal(err)
}
var desktop keybase1.DeviceDetail
var paper keybase1.DeviceDetail
for _, d := range eng.Devices() {
switch d.Device.Type {
case libkb.DeviceTypePaper:
paper = d
case libkb.DeviceTypeDesktop:
desktop = d
default:
t.Fatalf("unexpected device type %s", d.Device.Type)
}
}
// paper's provisioner should be desktop
if paper.Provisioner == nil {
t.Fatal("paper device has no provisioner")
}
if paper.Provisioner.DeviceID != desktop.Device.DeviceID {
t.Errorf("paper provisioned id: %s, expected %s", paper.Provisioner.DeviceID, desktop.Device.DeviceID)
t.Logf("desktop: %+v", desktop)
t.Logf("paper: %+v", paper)
}
// revoke the paper device
ctx.SecretUI = u.NewSecretUI()
ctx.LogUI = tc.G.UI.GetLogUI()
reng := NewRevokeDeviceEngine(RevokeDeviceEngineArgs{ID: paper.Device.DeviceID}, tc.G)
if err := RunEngine(reng, ctx); err != nil {
t.Fatal(err)
}
// get history after revoke
eng = NewDeviceHistorySelf(tc.G)
if err := RunEngine(eng, ctx); err != nil {
t.Fatal(err)
}
var desktop2 keybase1.DeviceDetail
var paper2 keybase1.DeviceDetail
for _, d := range eng.Devices() {
switch d.Device.Type {
case libkb.DeviceTypePaper:
paper2 = d
case libkb.DeviceTypeDesktop:
desktop2 = d
default:
t.Fatalf("unexpected device type %s", d.Device.Type)
}
}
// paper's provisioner should (still) be desktop
if paper2.Provisioner == nil {
t.Fatal("paper device has no provisioner")
}
if paper2.Provisioner.DeviceID != desktop2.Device.DeviceID {
t.Errorf("paper provisioned id: %s, expected %s", paper2.Provisioner.DeviceID, desktop2.Device.DeviceID)
t.Logf("desktop: %+v", desktop2)
t.Logf("paper: %+v", paper2)
}
if paper2.RevokedAt == nil {
t.Fatal("paper device RevokedAt is nil")
}
if paper2.RevokedBy.IsNil() {
t.Fatal("paper device RevokedBy is nil")
}
if paper2.RevokedByDevice == nil {
t.Fatal("paper device RevokedByDevice is nil")
}
if paper2.RevokedByDevice.DeviceID != desktop.Device.DeviceID {
t.Fatalf("paper revoked by wrong device, %s != %s", paper2.RevokedByDevice.DeviceID,
desktop.Device.DeviceID)
}
if paper2.RevokedByDevice.Name != desktop.Device.Name {
t.Fatalf("paper revoked by wrong device, %s != %s", paper2.RevokedByDevice.Name,
desktop.Device.Name)
}
}
func TestDeviceHistoryPGP(t *testing.T) {
tc := SetupEngineTest(t, "devhist")
u1 := createFakeUserWithPGPOnly(t, tc)
t.Log("Created fake synced pgp user")
Logout(tc)
tc.Cleanup()
// redo SetupEngineTest to get a new home directory...should look like a new device.
tc = SetupEngineTest(t, "devhist")
defer tc.Cleanup()
ctx := &Context{
ProvisionUI: newTestProvisionUIPassphrase(),
LoginUI: &libkb.TestLoginUI{Username: u1.Username},
LogUI: tc.G.UI.GetLogUI(),
SecretUI: u1.NewSecretUI(),
GPGUI: &gpgtestui{},
}
eng := NewLogin(tc.G, libkb.DeviceTypeDesktop, "", keybase1.ClientType_CLI)
if err := RunEngine(eng, ctx); err != nil {
t.Fatal(err)
}
ctx = &Context{}
heng := NewDeviceHistorySelf(tc.G)
if err := RunEngine(heng, ctx); err != nil {
t.Fatal(err)
}
devs := heng.Devices()
if len(devs) != 2 {
t.Errorf("num devices: %d, expected 2", len(devs))
}
var desktop keybase1.DeviceDetail
var paper keybase1.DeviceDetail
for _, d := range devs {
switch d.Device.Type {
case libkb.DeviceTypePaper:
paper = d
case libkb.DeviceTypeDesktop:
desktop = d
default:
t.Fatalf("unexpected device type %s", d.Device.Type)
}
}
// paper's provisioner should be desktop
if paper.Provisioner == nil {
t.Fatal("paper device has no provisioner")
}
if paper.Provisioner.DeviceID != desktop.Device.DeviceID {
t.Errorf("paper provisioned id: %s, expected %s", paper.Provisioner.DeviceID, desktop.Device.DeviceID)
t.Logf("desktop: %+v", desktop)
t.Logf("paper: %+v", paper)
}
}
| TestDeviceHistoryRevoked |
confeditor.py | #!/usr/bin/python3
import configparser
config = configparser.ConfigParser()
config.read('eve-conf.ini')
def int_imp(inp):
while True:
try:
int(inp)
break
except ValueError:
print('Input has to be a number.')
inp = input('Select again: ')
return int(inp)
def section_select(config):
csections = config.sections()
for section in csections:
print('{:>2}. {}'.format(csections.index(section),section))
num = len(csections)
print('% 2.0f. View All' % (num))
num2 = num + 1
print('%- 2.0f. Save File' % (num2))
num3 = num2 + 1
print('% 2.0f. Exit' % (num3))
while True:
inp = input('Select section to edit/option: ')
inp = int_imp(inp)
print()
if inp == num:
print_conf(config)
break
elif inp == num2:
save_file(config)
break
elif inp == num3:
print('Editor Closed')
break
elif inp < 0 or inp > num3:
print('Try again')
else:
item_editor(config, csections[inp])
break
def menu():
|
def print_conf(config):
csections = config.sections()
for section in csections:
print()
print('Section: %s' % (csections[csections.index(section)]))
items = config.items(csections[csections.index(section)])
for item in items:
print('{:>2}. {:<24}: {}'.format(items.index(item),item[0], item[1]))
menu()
def save_file(config):
with open('eve-conf.ini', 'w') as cfgfile:
config.write(cfgfile)
cfgfile.close()
print('Config Saved')
menu()
def item_editor(config, section):
csections = config.sections()
items = config.items(section)
print('Section: {}'.format(section))
for item in items:
print('{:>2}. {:<24}: {}'.format(items.index(item),item[0], item[1]))
print()
menu_b = items.index(item) + 1
print('{:>2}. Back'.format(menu_b))
inp2 = input('Select key to edit: ')
inp2 = int_imp(inp2)
if inp2 == menu_b:
menu()
elif inp2 < 0 or inp2 > menu_b:
print('Try Agin')
item_editor(config, section)
else:
inp2 = int_imp(inp2)
change = input('New value: ')
old_value = config[section][items[inp2][0]]
config.set(section,items[inp2][0],change)
print()
print('Section: %s' % (section))
items = config.items(section)
for item in items:
print('{:>2}. {:<24}: {}'.format(items.index(item),item[0], item[1]))
conf = input('Confim Change [y,N]: ')
if conf == 'y' or conf == 'Y':
print('Config File Edited.')
else:
config.set(section,items[inp2][0],old_value)
print('Config File Not Changed.')
print()
another = input('Edit another key in this section [y,N]: ')
if another == 'y' or another == 'Y':
print()
item_editor(config,section)
else:
menu()
section_select(config)
| print()
print('Menu')
print('{:>2}. Edit a Section'.format(0))
print('{:>2}. View File'.format(1))
print('{:>2}. Save File'.format(2))
print('{:>2}. Exit'.format(3))
while True:
inp = input('Select option: ')
inp = int_imp(inp)
print()
if inp == 0:
section_select(config)
break
elif inp == 1:
print_conf(config)
break
elif inp == 2:
save_file(config)
break
elif inp == 3:
print('Editor Closed')
break
elif inp < 0 or inp > 3:
print('Try again') |
bbands.js | 'use strict'
const _sum = require('lodash/sum')
const _isFinite = require('lodash/isFinite')
const SMA = require('./sma')
const StdDeviation = require('./stddev')
const Indicator = require('./indicator')
class | extends Indicator {
constructor (args = []) {
const [ period = 20, mul = 2 ] = args
super({
args,
id: BollingerBands.id,
name: `BBANDS(${period}, ${mul})`,
seedPeriod: period,
})
this._p = period
this._m = mul
this._ema = new SMA([period])
this._stddev = new StdDeviation([period])
}
static unserialize (args = []) {
return new BollingerBands(args)
}
reset () {
super.reset()
if (this._ema) this._ema.reset()
if (this._stddev) this._stddev.reset()
}
update (value) {
this._ema.update(value)
this._stddev.update(value)
const middle = this._ema.v()
const stddev = this._stddev.v()
return super.update({
top: middle + (this._m * stddev),
middle,
bottom: middle - (this._m * stddev),
})
}
add (value) {
this._ema.add(value)
this._stddev.add(value)
const middle = this._ema.v()
const stddev = this._stddev.v()
return super.add({
top: middle + (this._m * stddev),
middle,
bottom: middle - (this._m * stddev),
})
}
ready () {
return _isFinite((this.v() || {}).middle)
}
crossed (target) {
if (this.l() < 2) {
return false
}
const v = this.v().middle
const prev = this.prev().middle
return (
(v >= target && prev <= target) ||
(v <= target && prev >= target)
)
}
avg (n = 2) {
return _sum(this.nValues(n).map(v => v.middle)) / n
}
}
BollingerBands.id = 'bbands'
BollingerBands.label = 'BB'
BollingerBands.humanLabel = 'Bollinger Bands'
BollingerBands.ui = {
position: 'overlay',
type: 'bbands'
}
BollingerBands.args = [{
label: 'Period',
default: 20,
}, {
label: 'Multiplier',
default: 2,
}]
module.exports = BollingerBands
| BollingerBands |
test_pure.py | # Copyright (c) 2014 Pure Storage, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from copy import deepcopy
import sys
import ddt
import mock
from oslo_utils import units
from six.moves import http_client
from cinder import exception
from cinder import test
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit import fake_group
from cinder.tests.unit import fake_snapshot
from cinder.tests.unit import fake_volume
def fake_retry(exceptions, interval=1, retries=3, backoff_rate=2):
def _decorator(f):
return f
return _decorator
# Replace cinder.utils.retry *before* importing the driver module so the
# retry decorators applied at class-definition time become no-ops.
patch_retry = mock.patch('cinder.utils.retry', fake_retry)
patch_retry.start()
# The purestorage SDK is not a test dependency; install a Mock module so the
# driver import below succeeds without it.
sys.modules['purestorage'] = mock.Mock()
from cinder.volume.drivers import pure
# Only mock utils.retry for cinder.volume.drivers.pure import
patch_retry.stop()
# Dotted-path constants used to build mock.patch targets for the driver.
DRIVER_PATH = "cinder.volume.drivers.pure"
BASE_DRIVER_OBJ = DRIVER_PATH + ".PureBaseVolumeDriver"
ISCSI_DRIVER_OBJ = DRIVER_PATH + ".PureISCSIDriver"
FC_DRIVER_OBJ = DRIVER_PATH + ".PureFCDriver"
ARRAY_OBJ = DRIVER_PATH + ".FlashArray"
# Canned FlashArray.get() payloads for the primary and secondary arrays.
GET_ARRAY_PRIMARY = {"version": "99.9.9",
                     "revision": "201411230504+8a400f7",
                     "array_name": "pure_target1",
                     "id": "primary_array_id"}
GET_ARRAY_SECONDARY = {"version": "99.9.9",
                       "revision": "201411230504+8a400f7",
                       "array_name": "pure_target2",
                       "id": "secondary_array_id"}
# Replication tuning values wired into the mocked driver configuration.
REPLICATION_TARGET_TOKEN = "12345678-abcd-1234-abcd-1234567890ab"
REPLICATION_PROTECTION_GROUP = "cinder-group"
REPLICATION_INTERVAL_IN_SEC = 900
REPLICATION_RETENTION_SHORT_TERM = 14400
REPLICATION_RETENTION_LONG_TERM = 6
REPLICATION_RETENTION_LONG_TERM_PER_DAY = 3
PRIMARY_MANAGEMENT_IP = GET_ARRAY_PRIMARY["array_name"]
API_TOKEN = "12345678-abcd-1234-abcd-1234567890ab"
VOLUME_BACKEND_NAME = "Pure_iSCSI"
# Fake port inventory; IPs/WWNs are generated to line up with the port names.
ISCSI_PORT_NAMES = ["ct0.eth2", "ct0.eth3", "ct1.eth2", "ct1.eth3"]
FC_PORT_NAMES = ["ct0.fc2", "ct0.fc3", "ct1.fc2", "ct1.fc3"]
ISCSI_IPS = ["10.0.0." + str(i + 1) for i in range(len(ISCSI_PORT_NAMES))]
FC_WWNS = ["21000024ff59fe9" + str(i + 1) for i in range(len(FC_PORT_NAMES))]
HOSTNAME = "computenode1"
PURE_HOST_NAME = pure.PureBaseVolumeDriver._generate_purity_host_name(HOSTNAME)
PURE_HOST = {
    "name": PURE_HOST_NAME,
    "hgroup": None,
    "iqn": [],
    "wwn": [],
}
REST_VERSION = "1.2"
# Volume and snapshot fixtures in the dict shape the driver consumes.
VOLUME_ID = "abcdabcd-1234-abcd-1234-abcdeffedcba"
VOLUME_TYPE_ID = "357aa1f1-4f9c-4f10-acec-626af66425ba"
VOLUME = {
    "name": "volume-" + VOLUME_ID,
    "id": VOLUME_ID,
    "display_name": "fake_volume",
    "size": 2,
    "host": "irrelevant",
    "volume_type": None,
    "volume_type_id": VOLUME_TYPE_ID,
    "replication_status": None,
    "consistencygroup_id": None,
    "provider_location": GET_ARRAY_PRIMARY["id"],
    "group_id": None,
}
VOLUME_PURITY_NAME = VOLUME['name'] + '-cinder'
VOLUME_WITH_CGROUP = VOLUME.copy()
VOLUME_WITH_CGROUP['group_id'] = "4a2f7e3a-312a-40c5-96a8-536b8a0fe074"
VOLUME_WITH_CGROUP['consistencygroup_id'] = \
    "4a2f7e3a-312a-40c5-96a8-536b8a0fe074"
SRC_VOL_ID = "dc7a294d-5964-4379-a15f-ce5554734efc"
SRC_VOL = {
    "name": "volume-" + SRC_VOL_ID,
    "id": SRC_VOL_ID,
    "display_name": 'fake_src',
    "size": 2,
    "host": "irrelevant",
    "volume_type": None,
    "volume_type_id": None,
    "consistencygroup_id": None,
    "group_id": None,
}
SNAPSHOT_ID = "04fe2f9a-d0c4-4564-a30d-693cc3657b47"
SNAPSHOT = {
    "name": "snapshot-" + SNAPSHOT_ID,
    "id": SNAPSHOT_ID,
    "volume_id": SRC_VOL_ID,
    "volume_name": "volume-" + SRC_VOL_ID,
    "volume_size": 2,
    "display_name": "fake_snapshot",
    "cgsnapshot_id": None,
    "cgsnapshot": None,
    "group_snapshot_id": None,
    "group_snapshot": None,
}
SNAPSHOT_PURITY_NAME = SRC_VOL["name"] + '-cinder.' + SNAPSHOT["name"]
SNAPSHOT_WITH_CGROUP = SNAPSHOT.copy()
SNAPSHOT_WITH_CGROUP['group_snapshot'] = {
    "group_id": "4a2f7e3a-312a-40c5-96a8-536b8a0fe044",
}
# Connector fixtures for the iSCSI and FC attach paths.
INITIATOR_IQN = "iqn.1993-08.org.debian:01:222"
INITIATOR_WWN = "5001500150015081abc"
ISCSI_CONNECTOR = {"initiator": INITIATOR_IQN, "host": HOSTNAME}
FC_CONNECTOR = {"wwpns": {INITIATOR_WWN}, "host": HOSTNAME}
TARGET_IQN = "iqn.2010-06.com.purestorage:flasharray.12345abc"
TARGET_WWN = "21000024ff59fe94"
TARGET_PORT = "3260"
INITIATOR_TARGET_MAP =\
    {
        # _build_initiator_target_map() calls list(set()) on the list,
        # we must also call list(set()) to get the exact same order
        '5001500150015081abc': list(set(FC_WWNS)),
    }
DEVICE_MAPPING =\
    {
        "fabric": {'initiator_port_wwn_list': {INITIATOR_WWN},
                   'target_port_wwn_list': FC_WWNS
                   },
    }
# Port listings as returned by the array's list_ports() API.
ISCSI_PORTS = [{"name": name,
                "iqn": TARGET_IQN,
                "portal": ip + ":" + TARGET_PORT,
                "wwn": None,
                } for name, ip in zip(ISCSI_PORT_NAMES, ISCSI_IPS)]
FC_PORTS = [{"name": name,
             "iqn": None,
             "portal": None,
             "wwn": wwn,
             } for name, wwn in zip(FC_PORT_NAMES, FC_WWNS)]
NON_ISCSI_PORT = {
    "name": "ct0.fc1",
    "iqn": None,
    "portal": None,
    "wwn": "5001500150015081",
}
PORTS_WITH = ISCSI_PORTS + [NON_ISCSI_PORT]
PORTS_WITHOUT = [NON_ISCSI_PORT]
VOLUME_CONNECTIONS = [
    {"host": "h1", "name": VOLUME["name"] + "-cinder"},
    {"host": "h2", "name": VOLUME["name"] + "-cinder"},
]
# Capacity/space figures (GiB) used by the stats reporting tests.
TOTAL_CAPACITY = 50.0
USED_SPACE = 32.1
PROVISIONED_CAPACITY = 70.0
DEFAULT_OVER_SUBSCRIPTION = 20
SPACE_INFO = {
    "capacity": TOTAL_CAPACITY * units.Gi,
    "total": USED_SPACE * units.Gi,
}
SPACE_INFO_EMPTY = {
    "capacity": TOTAL_CAPACITY * units.Gi,
    "total": 0,
}
PERF_INFO = {
    'writes_per_sec': 318,
    'usec_per_write_op': 255,
    'output_per_sec': 234240,
    'reads_per_sec': 15,
    'input_per_sec': 2827943,
    'time': '2015-12-17T21:50:55Z',
    'usec_per_read_op': 192,
    'queue_depth': 4,
}
PERF_INFO_RAW = [PERF_INFO]
# Expected connection properties returned by initialize_connection().
ISCSI_CONNECTION_INFO = {
    "driver_volume_type": "iscsi",
    "data": {
        "target_discovered": False,
        "discard": True,
        "target_luns": [1, 1, 1, 1],
        "target_iqns": [TARGET_IQN, TARGET_IQN, TARGET_IQN, TARGET_IQN],
        "target_portals": [ISCSI_IPS[0] + ":" + TARGET_PORT,
                           ISCSI_IPS[1] + ":" + TARGET_PORT,
                           ISCSI_IPS[2] + ":" + TARGET_PORT,
                           ISCSI_IPS[3] + ":" + TARGET_PORT],
    },
}
FC_CONNECTION_INFO = {
    "driver_volume_type": "fibre_channel",
    "data": {
        "target_wwn": FC_WWNS,
        "target_lun": 1,
        "target_discovered": True,
        "initiator_target_map": INITIATOR_TARGET_MAP,
        "discard": True,
    },
}
# Raw Purity objects (snapshots, protection groups) as the REST API emits them.
PURE_SNAPSHOT = {
    "created": "2015-05-27T17:34:33Z",
    "name": "vol1.snap1",
    "serial": "8343DFDE2DAFBE40000115E4",
    "size": 3221225472,
    "source": "vol1"
}
PURE_PGROUP = {
    "hgroups": None,
    "hosts": None,
    "name": "pg1",
    "source": "pure01",
    "targets": None,
    "volumes": ["v1"]
}
# Protection-group fixtures covering both replication-target permission states.
PGROUP_ON_TARGET_NOT_ALLOWED = {
    "name": "array1:replicated_pgroup",
    "hgroups": None,
    "source": "array1",
    "hosts": None,
    "volumes": ["array1:replicated_volume"],
    "time_remaining": None,
    "targets": [{"name": "array2",
                 "allowed": False}]}
PGROUP_ON_TARGET_ALLOWED = {
    "name": "array1:replicated_pgroup",
    "hgroups": None,
    "source": "array1",
    "hosts": None,
    "volumes": ["array1:replicated_volume"],
    "time_remaining": None,
    "targets": [{"name": "array2",
                 "allowed": True}]}
CONNECTED_ARRAY = {
    "id": "6b1a7ce3-da61-0d86-65a7-9772cd259fef",
    "version": "99.9.9",
    "connected": True,
    "management_address": "10.42.10.229",
    "replication_address": "192.168.10.229",
    "type": ["replication"],
    "array_name": "3rd-pure-generic2"}
# Protection-group snapshot history, newest first, as list_pgroups(snap=True)
# style output.
REPLICATED_PGSNAPS = [
    {
        "name": "array1:cinder-repl-pg.3",
        "created": "2014-12-04T22:59:38Z",
        "started": "2014-12-04T22:59:38Z",
        "completed": "2014-12-04T22:59:39Z",
        "source": "array1:cinder-repl-pg",
        "logical_data_transferred": 0,
        "progress": 1.0,
        "data_transferred": 318
    },
    {
        "name": "array1:cinder-repl-pg.2",
        "created": "2014-12-04T21:59:38Z",
        "started": "2014-12-04T21:59:38Z",
        "completed": "2014-12-04T21:59:39Z",
        "source": "array1:cinder-repl-pg",
        "logical_data_transferred": 0,
        "progress": 1.0,
        "data_transferred": 318
    },
    {
        "name": "array1:cinder-repl-pg.1",
        "created": "2014-12-04T20:59:38Z",
        "started": "2014-12-04T20:59:38Z",
        "completed": "2014-12-04T20:59:39Z",
        "source": "array1:cinder-repl-pg",
        "logical_data_transferred": 0,
        "progress": 1.0,
        "data_transferred": 318
    }]
# Cinder volume objects and matching per-volume snapshots inside a pgroup snap.
REPLICATED_VOLUME_OBJS = [
    fake_volume.fake_volume_obj(None, id=fake.VOLUME_ID),
    fake_volume.fake_volume_obj(None, id=fake.VOLUME2_ID),
    fake_volume.fake_volume_obj(None, id=fake.VOLUME3_ID),
]
REPLICATED_VOLUME_SNAPS = [
    {
        "source": "array1:volume-%s-cinder" % fake.VOLUME_ID,
        "serial": "BBA481C01639104E0001D5F7",
        "created": "2014-12-04T22:59:38Z",
        "name": "array1:cinder-repl-pg.2.volume-%s-cinder" % fake.VOLUME_ID,
        "size": 1048576
    },
    {
        "source": "array1:volume-%s-cinder" % fake.VOLUME2_ID,
        "serial": "BBA481C01639104E0001D5F8",
        "created": "2014-12-04T22:59:38Z",
        "name": "array1:cinder-repl-pg.2.volume-%s-cinder" % fake.VOLUME2_ID,
        "size": 1048576
    },
    {
        "source": "array1:volume-%s-cinder" % fake.VOLUME3_ID,
        "serial": "BBA481C01639104E0001D5F9",
        "created": "2014-12-04T22:59:38Z",
        "name": "array1:cinder-repl-pg.2.volume-%s-cinder" % fake.VOLUME3_ID,
        "size": 1048576
    }
]
# Volume types with and without the replication extra-spec enabled.
NON_REPLICATED_VOL_TYPE = {"is_public": True,
                           "extra_specs": {},
                           "name": "volume_type_1",
                           "id": VOLUME_TYPE_ID}
REPLICATED_VOL_TYPE = {"is_public": True,
                       "extra_specs":
                       {pure.EXTRA_SPECS_REPL_ENABLED:
                        "<is> True"},
                       "name": "volume_type_2",
                       "id": VOLUME_TYPE_ID}
# Fixtures for manage/unmanage listing: raw Purity volumes/snaps plus the
# reference dicts the driver is expected to report for each (sizes in GiB,
# rounded up from the byte sizes above).
MANAGEABLE_PURE_VOLS = [
    {
        'name': 'myVol1',
        'serial': '8E9C7E588B16C1EA00048CCA',
        'size': 3221225472,
        'created': '2016-08-05T17:26:34Z',
        'source': None,
    },
    {
        'name': 'myVol2',
        'serial': '8E9C7E588B16C1EA00048CCB',
        'size': 3221225472,
        'created': '2016-08-05T17:26:34Z',
        'source': None,
    },
    {
        'name': 'myVol3',
        'serial': '8E9C7E588B16C1EA00048CCD',
        'size': 3221225472,
        'created': '2016-08-05T17:26:34Z',
        'source': None,
    }
]
MANAGEABLE_PURE_VOL_REFS = [
    {
        'reference': {'name': 'myVol1'},
        'size': 3,
        'safe_to_manage': True,
        'reason_not_safe': None,
        'cinder_id': None,
        'extra_info': None,
    },
    {
        'reference': {'name': 'myVol2'},
        'size': 3,
        'safe_to_manage': True,
        'reason_not_safe': None,
        'cinder_id': None,
        'extra_info': None,
    },
    {
        'reference': {'name': 'myVol3'},
        'size': 3,
        'safe_to_manage': True,
        'reason_not_safe': None,
        'cinder_id': None,
        'extra_info': None,
    }
]
MANAGEABLE_PURE_SNAPS = [
    {
        'name': 'volume-fd33de6e-56f6-452d-a7b6-451c11089a9f-cinder.snap1',
        'serial': '8E9C7E588B16C1EA00048CCA',
        'size': 3221225472,
        'created': '2016-08-05T17:26:34Z',
        'source': 'volume-fd33de6e-56f6-452d-a7b6-451c11089a9f-cinder',
    },
    {
        'name': 'volume-fd33de6e-56f6-452d-a7b6-451c11089a9f-cinder.snap2',
        'serial': '8E9C7E588B16C1EA00048CCB',
        'size': 4221225472,
        'created': '2016-08-05T17:26:34Z',
        'source': 'volume-fd33de6e-56f6-452d-a7b6-451c11089a9f-cinder',
    },
    {
        'name': 'volume-fd33de6e-56f6-452d-a7b6-451c11089a9f-cinder.snap3',
        'serial': '8E9C7E588B16C1EA00048CCD',
        'size': 5221225472,
        'created': '2016-08-05T17:26:34Z',
        'source': 'volume-fd33de6e-56f6-452d-a7b6-451c11089a9f-cinder',
    }
]
MANAGEABLE_PURE_SNAP_REFS = [
    {
        'reference': {'name': MANAGEABLE_PURE_SNAPS[0]['name']},
        'size': 3,
        'safe_to_manage': True,
        'reason_not_safe': None,
        'cinder_id': None,
        'extra_info': None,
        'source_reference': {'name': MANAGEABLE_PURE_SNAPS[0]['source']},
    },
    {
        'reference': {'name': MANAGEABLE_PURE_SNAPS[1]['name']},
        'size': 4,
        'safe_to_manage': True,
        'reason_not_safe': None,
        'cinder_id': None,
        'extra_info': None,
        'source_reference': {'name': MANAGEABLE_PURE_SNAPS[1]['source']},
    },
    {
        'reference': {'name': MANAGEABLE_PURE_SNAPS[2]['name']},
        'size': 5,
        'safe_to_manage': True,
        'reason_not_safe': None,
        'cinder_id': None,
        'extra_info': None,
        'source_reference': {'name': MANAGEABLE_PURE_SNAPS[2]['source']},
    }
]
class FakePureStorageHTTPError(Exception):
    """Stand-in for purestorage.PureHTTPError.

    Mirrors the attributes of the real SDK exception so the driver's
    error-handling paths (which inspect ``code`` and ``text``) can be
    exercised without the ``purestorage`` package installed.

    BUG FIX: the constructor's name had been lost (``def |(...)``), which is
    a syntax error; restored ``__init__``.
    """

    def __init__(self, target=None, rest_version=None, code=None,
                 headers=None, text=None):
        # Store the request context verbatim, like the real PureHTTPError.
        self.target = target
        self.rest_version = rest_version
        self.code = code
        self.headers = headers
        self.text = text
class PureDriverTestCase(test.TestCase):
    """Common fixture wiring shared by all Pure driver test cases."""

    def setUp(self):
        super(PureDriverTestCase, self).setUp()
        # Minimal driver configuration; safe_get() answers None for any
        # option the individual tests do not override explicitly.
        self.mock_config = mock.Mock()
        self.mock_config.san_ip = PRIMARY_MANAGEMENT_IP
        self.mock_config.pure_api_token = API_TOKEN
        self.mock_config.volume_backend_name = VOLUME_BACKEND_NAME
        self.mock_config.safe_get.return_value = None
        self.mock_config.pure_eradicate_on_delete = False
        self.mock_config.driver_ssl_cert_verify = False
        self.mock_config.driver_ssl_cert_path = None
        # Primary and secondary FlashArray stand-ins.
        self.array = mock.Mock()
        self.array.get.return_value = GET_ARRAY_PRIMARY
        self.array.array_name = GET_ARRAY_PRIMARY["array_name"]
        self.array.array_id = GET_ARRAY_PRIMARY["id"]
        self.array2 = mock.Mock()
        self.array2.array_name = GET_ARRAY_SECONDARY["array_name"]
        self.array2.array_id = GET_ARRAY_SECONDARY["id"]
        self.array2.get.return_value = GET_ARRAY_SECONDARY
        self.purestorage_module = pure.purestorage
        self.purestorage_module.VERSION = '1.4.0'
        self.purestorage_module.PureHTTPError = FakePureStorageHTTPError

    def fake_get_array(*args, **kwargs):
        """side_effect helper mimicking FlashArray.get() keyword modes.

        Returns perf data for ``get(action='monitor')``, space data for
        ``get(space=True)``, and None for any other call.
        """
        # BUG FIX: was ``kwargs['action'] is 'monitor'`` — identity
        # comparison against a string literal only worked by the accident
        # of CPython string interning; use equality instead.
        if 'action' in kwargs and kwargs['action'] == 'monitor':
            return PERF_INFO_RAW
        if 'space' in kwargs and kwargs['space'] is True:
            return SPACE_INFO

    def assert_error_propagates(self, mocks, func, *args, **kwargs):
        """Assert that errors from mocks propagate to func.

        Fail if exceptions raised by mocks are not seen when calling
        func(*args, **kwargs). Ensure that we are really seeing exceptions
        from the mocks by failing if just running func(*args, **kargs) raises
        an exception itself.
        """
        # Sanity check: the call must succeed before any fault is injected.
        func(*args, **kwargs)
        for mock_func in mocks:
            original_side_effect = mock_func.side_effect
            mock_func.side_effect = [exception.PureDriverException(
                reason='reason')]
            self.assertRaises(exception.PureDriverException,
                              func, *args, **kwargs)
            # Restore so later iterations/faults are independent.
            mock_func.side_effect = original_side_effect

    @mock.patch('platform.platform')
    def test_for_user_agent(self, mock_platform):
        # The user agent must embed driver class name, version and platform.
        mock_platform.return_value = 'MyFavoritePlatform'
        driver = pure.PureBaseVolumeDriver(configuration=self.mock_config)
        expected_agent = "OpenStack Cinder %s/%s (MyFavoritePlatform)" % (
            driver.__class__.__name__,
            driver.VERSION
        )
        self.assertEqual(expected_agent, driver._user_agent)
class PureBaseSharedDriverTestCase(PureDriverTestCase):
    # Extends the common fixture with a concrete PureBaseVolumeDriver wired
    # to the primary mock array, for tests exercising base-driver logic.
    def setUp(self):
        super(PureBaseSharedDriverTestCase, self).setUp()
        self.driver = pure.PureBaseVolumeDriver(configuration=self.mock_config)
        self.driver._array = self.array
        self.array.get_rest_version.return_value = '1.4'
        # Clear any FlashArray constructor side_effect left by earlier tests.
        self.purestorage_module.FlashArray.side_effect = None
        self.array2.get_rest_version.return_value = '1.4'
    def tearDown(self):
        super(PureBaseSharedDriverTestCase, self).tearDown()
@ddt.ddt
class PureBaseVolumeDriverTestCase(PureBaseSharedDriverTestCase):
def _setup_mocks_for_replication(self):
# Mock config values
self.mock_config.pure_replica_interval_default = (
REPLICATION_INTERVAL_IN_SEC)
self.mock_config.pure_replica_retention_short_term_default = (
REPLICATION_RETENTION_SHORT_TERM)
self.mock_config.pure_replica_retention_long_term_default = (
REPLICATION_RETENTION_LONG_TERM)
self.mock_config.pure_replica_retention_long_term_default = (
REPLICATION_RETENTION_LONG_TERM_PER_DAY)
self.mock_config.safe_get.return_value = [
{"backend_id": self.driver._array.array_id,
"managed_backend_name": None,
"san_ip": "1.2.3.4",
"api_token": "abc123"}]
    # One replication_device entry should yield exactly one target array.
    @mock.patch(BASE_DRIVER_OBJ + '._generate_replication_retention')
    @mock.patch(BASE_DRIVER_OBJ + '._setup_replicated_pgroups')
    def test_parse_replication_configs_single_target(
            self,
            mock_setup_repl_pgroups,
            mock_generate_replication_retention):
        retention = mock.MagicMock()
        mock_generate_replication_retention.return_value = retention
        mock_setup_repl_pgroups.return_value = None
        # Test single array configured
        self.mock_config.safe_get.return_value = [
            {"backend_id": self.driver._array.id,
             "managed_backend_name": None,
             "san_ip": "1.2.3.4",
             "api_token": "abc123"}]
        self.purestorage_module.FlashArray.return_value = self.array
        self.driver.parse_replication_configs()
        self.assertEqual(1, len(self.driver._replication_target_arrays))
        self.assertEqual(self.array, self.driver._replication_target_arrays[0])
        only_target_array = self.driver._replication_target_arrays[0]
        self.assertEqual(self.driver._array.id,
                         only_target_array._backend_id)
    # Two replication_device entries should yield two targets, in config
    # order, each tagged with its own backend_id.
    @mock.patch(BASE_DRIVER_OBJ + '._generate_replication_retention')
    @mock.patch(BASE_DRIVER_OBJ + '._setup_replicated_pgroups')
    def test_parse_replication_configs_multiple_target(
            self,
            mock_setup_repl_pgroups,
            mock_generate_replication_retention):
        retention = mock.MagicMock()
        mock_generate_replication_retention.return_value = retention
        mock_setup_repl_pgroups.return_value = None
        # Test multiple arrays configured
        self.mock_config.safe_get.return_value = [
            {"backend_id": GET_ARRAY_PRIMARY["id"],
             "managed_backend_name": None,
             "san_ip": "1.2.3.4",
             "api_token": "abc123"},
            {"backend_id": GET_ARRAY_SECONDARY["id"],
             "managed_backend_name": None,
             "san_ip": "1.2.3.5",
             "api_token": "abc124"}]
        self.purestorage_module.FlashArray.side_effect = \
            [self.array, self.array2]
        self.driver.parse_replication_configs()
        self.assertEqual(2, len(self.driver._replication_target_arrays))
        self.assertEqual(self.array, self.driver._replication_target_arrays[0])
        first_target_array = self.driver._replication_target_arrays[0]
        self.assertEqual(GET_ARRAY_PRIMARY["id"],
                         first_target_array._backend_id)
        self.assertEqual(
            self.array2, self.driver._replication_target_arrays[1])
        second_target_array = self.driver._replication_target_arrays[1]
        self.assertEqual(GET_ARRAY_SECONDARY["id"],
                         second_target_array._backend_id)
    # do_setup() with a replicated volume type must wire the secondary array
    # as the replication target and create the protection groups once.
    @mock.patch(BASE_DRIVER_OBJ + '._generate_replication_retention')
    @mock.patch(BASE_DRIVER_OBJ + '._setup_replicated_pgroups')
    @mock.patch('cinder.volume.volume_types.get_volume_type')
    def test_do_setup_replicated(self, mock_get_volume_type,
                                 mock_setup_repl_pgroups,
                                 mock_generate_replication_retention):
        retention = mock.MagicMock()
        mock_generate_replication_retention.return_value = retention
        mock_get_volume_type.return_value = REPLICATED_VOL_TYPE
        self._setup_mocks_for_replication()
        self.array2.get.return_value = GET_ARRAY_SECONDARY
        self.array.get.return_value = GET_ARRAY_PRIMARY
        self.purestorage_module.FlashArray.side_effect = [self.array,
                                                          self.array2]
        self.driver.do_setup(None)
        self.assertEqual(self.array, self.driver._array)
        self.assertEqual(1, len(self.driver._replication_target_arrays))
        self.assertEqual(self.array2,
                         self.driver._replication_target_arrays[0])
        calls = [
            mock.call(self.array, [self.array2], 'cinder-group',
                      REPLICATION_INTERVAL_IN_SEC, retention)
        ]
        mock_setup_repl_pgroups.assert_has_calls(calls)
    # Generated Purity host names must be truncated, sanitized, suffixed
    # with -cinder, and match the driver's GENERATED_NAME pattern.
    def test_generate_purity_host_name(self):
        result = self.driver._generate_purity_host_name(
            "really-long-string-thats-a-bit-too-long")
        self.assertTrue(result.startswith("really-long-string-that-"))
        self.assertTrue(result.endswith("-cinder"))
        self.assertEqual(63, len(result))
        self.assertTrue(pure.GENERATED_NAME.match(result))
        result = self.driver._generate_purity_host_name("!@#$%^-invalid&*")
        self.assertTrue(result.startswith("invalid---"))
        self.assertTrue(result.endswith("-cinder"))
        self.assertEqual(49, len(result))
        self.assertTrue(pure.GENERATED_NAME.match(result))
    # create_volume: right Purity name/size, group membership, error paths.
    @mock.patch(BASE_DRIVER_OBJ + "._add_to_group_if_needed")
    @mock.patch(BASE_DRIVER_OBJ + "._is_volume_replicated_type", autospec=True)
    def test_create_volume(self, mock_is_replicated_type, mock_add_to_group):
        mock_is_replicated_type.return_value = False
        self.driver.create_volume(VOLUME)
        vol_name = VOLUME["name"] + "-cinder"
        self.array.create_volume.assert_called_with(
            vol_name, 2 * units.Gi)
        mock_add_to_group.assert_called_once_with(VOLUME,
                                                  vol_name)
        self.assert_error_propagates([self.array.create_volume],
                                     self.driver.create_volume, VOLUME)
    # From-snapshot: copy only, no extend when sizes already match.
    @mock.patch(BASE_DRIVER_OBJ + "._add_to_group_if_needed")
    @mock.patch(BASE_DRIVER_OBJ + "._is_volume_replicated_type", autospec=True)
    def test_create_volume_from_snapshot(self, mock_is_replicated_type,
                                         mock_add_to_group):
        vol_name = VOLUME["name"] + "-cinder"
        snap_name = SNAPSHOT["volume_name"] + "-cinder." + SNAPSHOT["name"]
        mock_is_replicated_type.return_value = False
        # Branch where extend unneeded
        self.driver.create_volume_from_snapshot(VOLUME, SNAPSHOT)
        self.array.copy_volume.assert_called_with(snap_name, vol_name)
        self.assertFalse(self.array.extend_volume.called)
        mock_add_to_group.assert_called_once_with(VOLUME,
                                                  vol_name)
        self.assert_error_propagates(
            [self.array.copy_volume],
            self.driver.create_volume_from_snapshot, VOLUME, SNAPSHOT)
        self.assertFalse(self.array.extend_volume.called)
    # From-snapshot: copy then extend when the snapshot is smaller.
    @mock.patch(BASE_DRIVER_OBJ + "._add_to_group_if_needed")
    @mock.patch(BASE_DRIVER_OBJ + "._is_volume_replicated_type",
                autospec=True)
    def test_create_volume_from_snapshot_with_extend(self,
                                                     mock_is_replicated_type,
                                                     mock_add_to_group):
        vol_name = VOLUME["name"] + "-cinder"
        snap_name = SNAPSHOT["volume_name"] + "-cinder." + SNAPSHOT["name"]
        mock_is_replicated_type.return_value = False
        # Branch where extend needed
        src = deepcopy(SNAPSHOT)
        src["volume_size"] = 1 # resize so smaller than VOLUME
        self.driver.create_volume_from_snapshot(VOLUME, src)
        expected = [mock.call.copy_volume(snap_name, vol_name),
                    mock.call.extend_volume(vol_name, 2 * units.Gi)]
        self.array.assert_has_calls(expected)
        mock_add_to_group.assert_called_once_with(VOLUME,
                                                  vol_name)
    # Unresolvable snapshot name must raise rather than create garbage.
    @mock.patch(BASE_DRIVER_OBJ + "._get_snap_name")
    def test_create_volume_from_snapshot_cant_get_name(self, mock_get_name):
        mock_get_name.return_value = None
        self.assertRaises(exception.PureDriverException,
                          self.driver.create_volume_from_snapshot,
                          VOLUME, SNAPSHOT)
    # Same for pgroup-snapshot-based names.
    @mock.patch(BASE_DRIVER_OBJ + "._get_pgroup_snap_name_from_snapshot")
    def test_create_volume_from_cgsnapshot_cant_get_name(self, mock_get_name):
        mock_get_name.return_value = None
        self.assertRaises(exception.PureDriverException,
                          self.driver.create_volume_from_snapshot,
                          VOLUME, SNAPSHOT_WITH_CGROUP)
    # From a consistency-group snapshot: copy from the pgroup snap member.
    @mock.patch(BASE_DRIVER_OBJ + "._add_to_group_if_needed")
    @mock.patch(BASE_DRIVER_OBJ + "._extend_if_needed", autospec=True)
    @mock.patch(BASE_DRIVER_OBJ + "._get_pgroup_snap_name_from_snapshot")
    @mock.patch(BASE_DRIVER_OBJ + "._is_volume_replicated_type", autospec=True)
    def test_create_volume_from_cgsnapshot(self, mock_is_replicated_type,
                                           mock_get_snap_name,
                                           mock_extend_if_needed,
                                           mock_add_to_group):
        vol_name = VOLUME_WITH_CGROUP["name"] + "-cinder"
        snap_name = "consisgroup-4a2f7e3a-312a-40c5-96a8-536b8a0f" \
                    "e074-cinder.4a2f7e3a-312a-40c5-96a8-536b8a0fe075."\
                    + vol_name
        mock_get_snap_name.return_value = snap_name
        mock_is_replicated_type.return_value = False
        self.driver.create_volume_from_snapshot(VOLUME_WITH_CGROUP,
                                                SNAPSHOT_WITH_CGROUP)
        self.array.copy_volume.assert_called_with(snap_name, vol_name)
        self.assertTrue(mock_get_snap_name.called)
        self.assertTrue(mock_extend_if_needed.called)
        # NOTE(review): the method is invoked a second time below; looks
        # redundant with the first call — confirm whether intentional.
        self.driver.create_volume_from_snapshot(VOLUME_WITH_CGROUP,
                                                SNAPSHOT_WITH_CGROUP)
        mock_add_to_group\
            .assert_called_with(VOLUME_WITH_CGROUP,
                                vol_name)
    # Tests cloning a volume that is not replicated type
    @mock.patch(BASE_DRIVER_OBJ + "._add_to_group_if_needed")
    @mock.patch(BASE_DRIVER_OBJ + "._is_volume_replicated_type", autospec=True)
    def test_create_cloned_volume(self, mock_is_replicated_type,
                                  mock_add_to_group):
        vol_name = VOLUME["name"] + "-cinder"
        src_name = SRC_VOL["name"] + "-cinder"
        mock_is_replicated_type.return_value = False
        # Branch where extend unneeded
        self.driver.create_cloned_volume(VOLUME, SRC_VOL)
        self.array.copy_volume.assert_called_with(src_name, vol_name)
        self.assertFalse(self.array.extend_volume.called)
        mock_add_to_group.assert_called_once_with(VOLUME,
                                                  vol_name)
        self.assert_error_propagates(
            [self.array.copy_volume],
            self.driver.create_cloned_volume, VOLUME, SRC_VOL)
        self.assertFalse(self.array.extend_volume.called)
    # Clone of a smaller source: copy then extend to the new volume's size.
    @mock.patch(BASE_DRIVER_OBJ + "._add_to_group_if_needed")
    @mock.patch(BASE_DRIVER_OBJ + "._is_volume_replicated_type",
                autospec=True)
    def test_create_cloned_volume_and_extend(self, mock_is_replicated_type,
                                             mock_add_to_group):
        vol_name = VOLUME["name"] + "-cinder"
        src_name = SRC_VOL["name"] + "-cinder"
        src = deepcopy(SRC_VOL)
        src["size"] = 1 # resize so smaller than VOLUME
        self.driver.create_cloned_volume(VOLUME, src)
        expected = [mock.call.copy_volume(src_name, vol_name),
                    mock.call.extend_volume(vol_name, 2 * units.Gi)]
        self.array.assert_has_calls(expected)
        mock_add_to_group.assert_called_once_with(VOLUME,
                                                  vol_name)
    # Tests cloning a volume that is part of a consistency group
    @mock.patch(BASE_DRIVER_OBJ + "._add_to_group_if_needed")
    @mock.patch(BASE_DRIVER_OBJ + "._is_volume_replicated_type", autospec=True)
    def test_create_cloned_volume_with_cgroup(self, mock_is_replicated_type,
                                              mock_add_to_group):
        vol_name = VOLUME_WITH_CGROUP["name"] + "-cinder"
        mock_is_replicated_type.return_value = False
        self.driver.create_cloned_volume(VOLUME_WITH_CGROUP, SRC_VOL)
        mock_add_to_group.assert_called_with(VOLUME_WITH_CGROUP,
                                             vol_name)
    # Deleting an already-gone volume must be a silent no-op, both when the
    # connection listing and when destroy_volume report it missing.
    def test_delete_volume_already_deleted(self):
        self.array.list_volume_private_connections.side_effect = \
            self.purestorage_module.PureHTTPError(
                code=400,
                text="Volume does not exist"
            )
        self.driver.delete_volume(VOLUME)
        self.assertFalse(self.array.destroy_volume.called)
        self.assertFalse(self.array.eradicate_volume.called)
        # Testing case where array.destroy_volume returns an exception
        # because volume has already been deleted
        self.array.list_volume_private_connections.side_effect = None
        self.array.list_volume_private_connections.return_value = {}
        self.array.destroy_volume.side_effect = \
            self.purestorage_module.PureHTTPError(
                code=http_client.BAD_REQUEST,
                text="Volume does not exist"
            )
        self.driver.delete_volume(VOLUME)
        self.assertTrue(self.array.destroy_volume.called)
        self.assertFalse(self.array.eradicate_volume.called)
    # Normal delete: destroy only (no eradicate), tolerant of "does not
    # exist", and other errors propagate.
    def test_delete_volume(self):
        vol_name = VOLUME["name"] + "-cinder"
        self.array.list_volume_private_connections.return_value = {}
        self.driver.delete_volume(VOLUME)
        expected = [mock.call.destroy_volume(vol_name)]
        self.array.assert_has_calls(expected)
        self.assertFalse(self.array.eradicate_volume.called)
        self.array.destroy_volume.side_effect = (
            self.purestorage_module.PureHTTPError(code=http_client.BAD_REQUEST,
                                                  text="does not exist"))
        self.driver.delete_volume(VOLUME)
        self.array.destroy_volume.side_effect = None
        self.assert_error_propagates([self.array.destroy_volume],
                                     self.driver.delete_volume, VOLUME)
    # With pure_eradicate_on_delete set, destroy is followed by eradicate.
    def test_delete_volume_eradicate_now(self):
        vol_name = VOLUME["name"] + "-cinder"
        self.array.list_volume_private_connections.return_value = {}
        self.mock_config.pure_eradicate_on_delete = True
        self.driver.delete_volume(VOLUME)
        expected = [mock.call.destroy_volume(vol_name),
                    mock.call.eradicate_volume(vol_name)]
        self.array.assert_has_calls(expected)
    # A still-connected volume is disconnected from every host (checking each
    # host's remaining private connections) before being destroyed.
    def test_delete_connected_volume(self):
        vol_name = VOLUME["name"] + "-cinder"
        host_name_a = "ha"
        host_name_b = "hb"
        self.array.list_volume_private_connections.return_value = [{
            "host": host_name_a,
            "lun": 7,
            "name": vol_name,
            "size": 3221225472,
        }, {
            "host": host_name_b,
            "lun": 2,
            "name": vol_name,
            "size": 3221225472,
        }]
        self.driver.delete_volume(VOLUME)
        expected = [mock.call.list_volume_private_connections(vol_name),
                    mock.call.disconnect_host(host_name_a, vol_name),
                    mock.call.list_host_connections(host_name_a, private=True),
                    mock.call.disconnect_host(host_name_b, vol_name),
                    mock.call.list_host_connections(host_name_b, private=True),
                    mock.call.destroy_volume(vol_name)]
        self.array.assert_has_calls(expected)
    # Snapshot creation targets the source volume's Purity name with the
    # snapshot name as suffix.
    def test_create_snapshot(self):
        vol_name = SRC_VOL["name"] + "-cinder"
        self.driver.create_snapshot(SNAPSHOT)
        self.array.create_snapshot.assert_called_with(
            vol_name,
            suffix=SNAPSHOT["name"]
        )
        self.assert_error_propagates([self.array.create_snapshot],
                                     self.driver.create_snapshot, SNAPSHOT)
    # Snapshot delete tolerates both "missing" error texts; others propagate.
    @ddt.data("does not exist", "has been destroyed")
    def test_delete_snapshot(self, error_text):
        snap_name = SNAPSHOT["volume_name"] + "-cinder." + SNAPSHOT["name"]
        self.driver.delete_snapshot(SNAPSHOT)
        expected = [mock.call.destroy_volume(snap_name)]
        self.array.assert_has_calls(expected)
        self.assertFalse(self.array.eradicate_volume.called)
        self.array.destroy_volume.side_effect = (
            self.purestorage_module.PureHTTPError(code=http_client.BAD_REQUEST,
                                                  text=error_text))
        self.driver.delete_snapshot(SNAPSHOT)
        self.array.destroy_volume.side_effect = None
        self.assert_error_propagates([self.array.destroy_volume],
                                     self.driver.delete_snapshot, SNAPSHOT)
    # Snapshot delete with eradicate-on-delete destroys then eradicates.
    def test_delete_snapshot_eradicate_now(self):
        snap_name = SNAPSHOT["volume_name"] + "-cinder." + SNAPSHOT["name"]
        self.mock_config.pure_eradicate_on_delete = True
        self.driver.delete_snapshot(SNAPSHOT)
        expected = [mock.call.destroy_volume(snap_name),
                    mock.call.eradicate_volume(snap_name)]
        self.array.assert_has_calls(expected)
    # terminate_connection branch coverage: manually-created host, host in a
    # hostgroup, host with remaining connections, host cleanup, tolerated
    # "is not connected" error, and unexpected errors propagating.
    @mock.patch(BASE_DRIVER_OBJ + "._get_host", autospec=True)
    def test_terminate_connection(self, mock_host):
        vol_name = VOLUME["name"] + "-cinder"
        mock_host.return_value = {"name": "some-host"}
        # Branch with manually created host
        self.driver.terminate_connection(VOLUME, ISCSI_CONNECTOR)
        self.array.disconnect_host.assert_called_with("some-host", vol_name)
        self.assertTrue(self.array.list_host_connections.called)
        self.assertFalse(self.array.delete_host.called)
        # Branch with host added to host group
        self.array.reset_mock()
        self.array.list_host_connections.return_value = []
        mock_host.return_value = PURE_HOST.copy()
        mock_host.return_value.update(hgroup="some-group")
        self.driver.terminate_connection(VOLUME, ISCSI_CONNECTOR)
        self.array.disconnect_host.assert_called_with(PURE_HOST_NAME, vol_name)
        self.assertTrue(self.array.list_host_connections.called)
        self.assertTrue(self.array.delete_host.called)
        # Branch with host still having connected volumes
        self.array.reset_mock()
        self.array.list_host_connections.return_value = [
            {"lun": 2, "name": PURE_HOST_NAME, "vol": "some-vol"}]
        mock_host.return_value = PURE_HOST
        self.driver.terminate_connection(VOLUME, ISCSI_CONNECTOR)
        self.array.disconnect_host.assert_called_with(PURE_HOST_NAME, vol_name)
        self.array.list_host_connections.assert_called_with(PURE_HOST_NAME,
                                                            private=True)
        self.assertFalse(self.array.delete_host.called)
        # Branch where host gets deleted
        self.array.reset_mock()
        self.array.list_host_connections.return_value = []
        self.driver.terminate_connection(VOLUME, ISCSI_CONNECTOR)
        self.array.disconnect_host.assert_called_with(PURE_HOST_NAME, vol_name)
        self.array.list_host_connections.assert_called_with(PURE_HOST_NAME,
                                                            private=True)
        self.array.delete_host.assert_called_with(PURE_HOST_NAME)
        # Branch where connection is missing and the host is still deleted
        self.array.reset_mock()
        self.array.disconnect_host.side_effect = \
            self.purestorage_module.PureHTTPError(code=http_client.BAD_REQUEST,
                                                  text="is not connected")
        self.driver.terminate_connection(VOLUME, ISCSI_CONNECTOR)
        self.array.disconnect_host.assert_called_with(PURE_HOST_NAME, vol_name)
        self.array.list_host_connections.assert_called_with(PURE_HOST_NAME,
                                                            private=True)
        self.array.delete_host.assert_called_with(PURE_HOST_NAME)
        # Branch where an unexpected exception occurs
        self.array.reset_mock()
        self.array.disconnect_host.side_effect = \
            self.purestorage_module.PureHTTPError(
                code=http_client.INTERNAL_SERVER_ERROR,
                text="Some other error"
            )
        self.assertRaises(self.purestorage_module.PureHTTPError,
                          self.driver.terminate_connection,
                          VOLUME,
                          ISCSI_CONNECTOR)
        self.array.disconnect_host.assert_called_with(PURE_HOST_NAME, vol_name)
        self.assertFalse(self.array.list_host_connections.called)
        self.assertFalse(self.array.delete_host.called)
    # Helper: terminate with delete_host failing with `error`; the failure
    # must be swallowed after a disconnect+cleanup attempt.
    def _test_terminate_connection_with_error(self, mock_host, error):
        vol_name = VOLUME["name"] + "-cinder"
        mock_host.return_value = PURE_HOST.copy()
        self.array.reset_mock()
        self.array.list_host_connections.return_value = []
        self.array.delete_host.side_effect = \
            self.purestorage_module.PureHTTPError(code=http_client.BAD_REQUEST,
                                                  text=error)
        self.driver.terminate_connection(VOLUME, ISCSI_CONNECTOR)
        self.array.disconnect_host.assert_called_with(PURE_HOST_NAME, vol_name)
        self.array.list_host_connections.assert_called_with(PURE_HOST_NAME,
                                                            private=True)
        self.array.delete_host.assert_called_once_with(PURE_HOST_NAME)
    # Host already deleted concurrently: tolerated.
    @mock.patch(BASE_DRIVER_OBJ + "._get_host", autospec=True)
    def test_terminate_connection_host_deleted(self, mock_host):
        self._test_terminate_connection_with_error(mock_host,
                                                   'Host does not exist.')
    # Host re-acquired connections during cleanup: tolerated.
    @mock.patch(BASE_DRIVER_OBJ + "._get_host", autospec=True)
    def test_terminate_connection_host_got_new_connections(self, mock_host):
        self._test_terminate_connection_with_error(
            mock_host,
            'Host cannot be deleted due to existing connections.'
        )
def test_extend_volume(self):
vol_name = VOLUME["name"] + "-cinder"
self.driver.extend_volume(VOLUME, 3)
self.array.extend_volume.assert_called_with(vol_name, 3 * units.Gi)
self.assert_error_propagates([self.array.extend_volume],
self.driver.extend_volume, VOLUME, 3)
def test_get_pgroup_name_from_id(self):
id = "4a2f7e3a-312a-40c5-96a8-536b8a0fe074"
expected_name = "consisgroup-%s-cinder" % id
actual_name = self.driver._get_pgroup_name_from_id(id)
self.assertEqual(expected_name, actual_name)
def test_get_pgroup_snap_suffix(self):
cgsnap = {
'id': "4a2f7e3a-312a-40c5-96a8-536b8a0fe074"
}
expected_suffix = "cgsnapshot-%s-cinder" % cgsnap['id']
actual_suffix = self.driver._get_pgroup_snap_suffix(cgsnap)
self.assertEqual(expected_suffix, actual_suffix)
def test_get_pgroup_snap_name(self):
cg_id = "4a2f7e3a-312a-40c5-96a8-536b8a0fe074"
cgsnap_id = "4a2f7e3a-312a-40c5-96a8-536b8a0fe075"
cgsnap = {
'id': cgsnap_id,
'group_id': cg_id
}
expected_name = "consisgroup-%(cg)s-cinder.cgsnapshot-%(snap)s-cinder"\
% {"cg": cg_id, "snap": cgsnap_id}
actual_name = self.driver._get_pgroup_snap_name(cgsnap)
self.assertEqual(expected_name, actual_name)
    def test_get_pgroup_snap_name_from_snapshot(self):
        """Per-volume snap name is the pgroup snap name plus volume name."""
        groupsnapshot_id = 'b919b266-23b4-4b83-9a92-e66031b9a921'
        volume_name = 'volume-a3b8b294-8494-4a72-bec7-9aadec561332'
        cg_id = '0cfc0e4e-5029-4839-af20-184fbc42a9ed'
        pgsnap_name_base = (
            'consisgroup-%s-cinder.cgsnapshot-%s-cinder.%s-cinder')
        pgsnap_name = pgsnap_name_base % (cg_id, groupsnapshot_id, volume_name)
        # The driver looks the group snapshot up via its db attribute.
        self.driver.db = mock.MagicMock()
        cgsnap = {
            'id': groupsnapshot_id,
            'group_id': cg_id
        }
        self.driver.db.group_snapshot_get.return_value = cgsnap
        mock_snap = mock.MagicMock()
        mock_snap.group_snapshot = cgsnap
        mock_snap.volume_name = volume_name
        actual_name = self.driver._get_pgroup_snap_name_from_snapshot(
            mock_snap
        )
        self.assertEqual(pgsnap_name, actual_name)
def test_create_consistencygroup(self):
mock_cgroup = mock.Mock()
mock_cgroup.id = "4a2f7e3a-312a-40c5-96a8-536b8a0fe074"
model_update = self.driver.create_consistencygroup(None, mock_cgroup)
expected_name = self.driver._get_pgroup_name_from_id(mock_cgroup.id)
self.array.create_pgroup.assert_called_with(expected_name)
self.assertEqual({'status': 'available'}, model_update)
self.assert_error_propagates(
[self.array.create_pgroup],
self.driver.create_consistencygroup, None, mock_cgroup)
@mock.patch(BASE_DRIVER_OBJ + ".create_volume_from_snapshot")
@mock.patch(BASE_DRIVER_OBJ + ".create_consistencygroup")
def test_create_consistencygroup_from_cgsnapshot(self, mock_create_cg,
mock_create_vol):
mock_context = mock.Mock()
mock_group = mock.Mock()
mock_cgsnapshot = mock.Mock()
mock_snapshots = [mock.Mock() for i in range(5)]
mock_volumes = [mock.Mock() for i in range(5)]
result = self.driver.create_consistencygroup_from_src(
mock_context,
mock_group,
mock_volumes,
cgsnapshot=mock_cgsnapshot,
snapshots=mock_snapshots,
source_cg=None,
source_vols=None
)
self.assertEqual((None, None), result)
mock_create_cg.assert_called_with(mock_context, mock_group)
expected_calls = [mock.call(vol, snap)
for vol, snap in zip(mock_volumes, mock_snapshots)]
mock_create_vol.assert_has_calls(expected_calls,
any_order=True)
self.assert_error_propagates(
[mock_create_vol, mock_create_cg],
self.driver.create_consistencygroup_from_src,
mock_context,
mock_group,
mock_volumes,
cgsnapshot=mock_cgsnapshot,
snapshots=mock_snapshots,
source_cg=None,
source_vols=None
)
@mock.patch(BASE_DRIVER_OBJ + ".create_consistencygroup")
def test_create_consistencygroup_from_cg(self, mock_create_cg):
num_volumes = 5
mock_context = mock.MagicMock()
mock_group = mock.MagicMock()
mock_source_cg = mock.MagicMock()
mock_volumes = [mock.MagicMock() for i in range(num_volumes)]
mock_source_vols = [mock.MagicMock() for i in range(num_volumes)]
result = self.driver.create_consistencygroup_from_src(
mock_context,
mock_group,
mock_volumes,
source_cg=mock_source_cg,
source_vols=mock_source_vols
)
self.assertEqual((None, None), result)
mock_create_cg.assert_called_with(mock_context, mock_group)
self.assertTrue(self.array.create_pgroup_snapshot.called)
self.assertEqual(num_volumes, self.array.copy_volume.call_count)
self.assertEqual(num_volumes, self.array.set_pgroup.call_count)
self.assertTrue(self.array.destroy_pgroup.called)
@mock.patch(BASE_DRIVER_OBJ + ".create_consistencygroup")
def test_create_consistencygroup_from_cg_with_error(self, mock_create_cg):
num_volumes = 5
mock_context = mock.MagicMock()
mock_group = mock.MagicMock()
mock_source_cg = mock.MagicMock()
mock_volumes = [mock.MagicMock() for i in range(num_volumes)]
mock_source_vols = [mock.MagicMock() for i in range(num_volumes)]
self.array.copy_volume.side_effect = FakePureStorageHTTPError()
self.assertRaises(
FakePureStorageHTTPError,
self.driver.create_consistencygroup_from_src,
mock_context,
mock_group,
mock_volumes,
source_cg=mock_source_cg,
source_vols=mock_source_vols
)
mock_create_cg.assert_called_with(mock_context, mock_group)
self.assertTrue(self.array.create_pgroup_snapshot.called)
# Make sure that the temp snapshot is cleaned up even when copying
# the volume fails!
self.assertTrue(self.array.destroy_pgroup.called)
@mock.patch(BASE_DRIVER_OBJ + ".delete_volume", autospec=True)
def test_delete_consistencygroup(self, mock_delete_volume):
mock_cgroup = mock.MagicMock()
mock_cgroup.id = "4a2f7e3a-312a-40c5-96a8-536b8a0fe074"
mock_cgroup['status'] = "deleted"
mock_context = mock.Mock()
mock_volume = mock.MagicMock()
model_update, volumes = self.driver.delete_consistencygroup(
mock_context, mock_cgroup, [mock_volume])
expected_name = self.driver._get_pgroup_name_from_id(mock_cgroup.id)
self.array.destroy_pgroup.assert_called_with(expected_name)
self.assertFalse(self.array.eradicate_pgroup.called)
self.assertIsNone(volumes)
self.assertIsNone(model_update)
mock_delete_volume.assert_called_with(self.driver, mock_volume)
self.array.destroy_pgroup.side_effect = \
self.purestorage_module.PureHTTPError(
code=http_client.BAD_REQUEST,
text="Protection group has been destroyed."
)
self.driver.delete_consistencygroup(mock_context,
mock_cgroup,
[mock_volume])
self.array.destroy_pgroup.assert_called_with(expected_name)
self.assertFalse(self.array.eradicate_pgroup.called)
mock_delete_volume.assert_called_with(self.driver, mock_volume)
self.array.destroy_pgroup.side_effect = \
self.purestorage_module.PureHTTPError(
code=http_client.BAD_REQUEST,
text="Protection group does not exist"
)
self.driver.delete_consistencygroup(mock_context,
mock_cgroup,
[mock_volume])
self.array.destroy_pgroup.assert_called_with(expected_name)
self.assertFalse(self.array.eradicate_pgroup.called)
mock_delete_volume.assert_called_with(self.driver, mock_volume)
self.array.destroy_pgroup.side_effect = \
self.purestorage_module.PureHTTPError(
code=http_client.BAD_REQUEST,
text="Some other error"
)
self.assertRaises(self.purestorage_module.PureHTTPError,
self.driver.delete_consistencygroup,
mock_context,
mock_volume,
[mock_volume])
self.array.destroy_pgroup.side_effect = \
self.purestorage_module.PureHTTPError(
code=http_client.INTERNAL_SERVER_ERROR,
text="Another different error"
)
self.assertRaises(self.purestorage_module.PureHTTPError,
self.driver.delete_consistencygroup,
mock_context,
mock_volume,
[mock_volume])
self.array.destroy_pgroup.side_effect = None
self.assert_error_propagates(
[self.array.destroy_pgroup],
self.driver.delete_consistencygroup,
mock_context,
mock_cgroup,
[mock_volume]
)
def _create_mock_cg(self):
mock_group = mock.MagicMock()
mock_group.id = "4a2f7e3a-312a-40c5-96a8-536b8a0fe074"
mock_group.status = "Available"
mock_group.cg_name = "consisgroup-" + mock_group.id + "-cinder"
return mock_group
def test_update_consistencygroup(self):
mock_group = self._create_mock_cg()
add_vols = [
{'name': 'vol1'},
{'name': 'vol2'},
{'name': 'vol3'},
]
expected_addvollist = [vol['name'] + '-cinder' for vol in add_vols]
remove_vols = [
{'name': 'vol4'},
{'name': 'vol5'},
]
expected_remvollist = [vol['name'] + '-cinder' for vol in remove_vols]
self.driver.update_consistencygroup(mock.Mock(), mock_group,
add_vols, remove_vols)
self.array.set_pgroup.assert_called_with(
mock_group.cg_name,
addvollist=expected_addvollist,
remvollist=expected_remvollist
)
def test_update_consistencygroup_no_add_vols(self):
mock_group = self._create_mock_cg()
expected_addvollist = []
remove_vols = [
{'name': 'vol4'},
{'name': 'vol5'},
]
expected_remvollist = [vol['name'] + '-cinder' for vol in remove_vols]
self.driver.update_consistencygroup(mock.Mock(), mock_group,
None, remove_vols)
self.array.set_pgroup.assert_called_with(
mock_group.cg_name,
addvollist=expected_addvollist,
remvollist=expected_remvollist
)
def test_update_consistencygroup_no_remove_vols(self):
mock_group = self._create_mock_cg()
add_vols = [
{'name': 'vol1'},
{'name': 'vol2'},
{'name': 'vol3'},
]
expected_addvollist = [vol['name'] + '-cinder' for vol in add_vols]
expected_remvollist = []
self.driver.update_consistencygroup(mock.Mock(), mock_group,
add_vols, None)
self.array.set_pgroup.assert_called_with(
mock_group.cg_name,
addvollist=expected_addvollist,
remvollist=expected_remvollist
)
def test_update_consistencygroup_no_vols(self):
mock_group = self._create_mock_cg()
self.driver.update_consistencygroup(mock.Mock(), mock_group,
None, None)
self.array.set_pgroup.assert_called_with(
mock_group.cg_name,
addvollist=[],
remvollist=[]
)
    def test_create_cgsnapshot(self):
        """Creating a cgsnapshot snapshots the CG's protection group."""
        mock_cgsnap = {
            'id': "4a2f7e3a-312a-40c5-96a8-536b8a0fe074",
            'group_id': "4a2f7e3a-312a-40c5-96a8-536b8a0fe075",
        }
        mock_context = mock.Mock()
        mock_snap = mock.MagicMock()
        model_update, snapshots = self.driver.create_cgsnapshot(mock_context,
                                                                mock_cgsnap,
                                                                [mock_snap])
        cg_id = mock_cgsnap["group_id"]
        expected_pgroup_name = self.driver._get_pgroup_name_from_id(cg_id)
        expected_snap_suffix = self.driver._get_pgroup_snap_suffix(mock_cgsnap)
        self.array.create_pgroup_snapshot\
            .assert_called_with(expected_pgroup_name,
                                suffix=expected_snap_suffix)
        self.assertIsNone(model_update)
        self.assertIsNone(snapshots)
        self.assert_error_propagates(
            [self.array.create_pgroup_snapshot],
            self.driver.create_cgsnapshot, mock_context, mock_cgsnap, [])
    @mock.patch(BASE_DRIVER_OBJ + "._get_pgroup_snap_name",
                spec=pure.PureBaseVolumeDriver._get_pgroup_snap_name)
    def test_delete_cgsnapshot(self, mock_get_snap_name):
        """Deleting a cgsnapshot destroys the pgroup snapshot.

        Benign 400 responses ("already destroyed" / "does not exist") are
        swallowed; any other HTTP error must propagate.
        """
        snap_name = "consisgroup-4a2f7e3a-312a-40c5-96a8-536b8a0f" \
                    "e074-cinder.4a2f7e3a-312a-40c5-96a8-536b8a0fe075"
        mock_get_snap_name.return_value = snap_name
        mock_cgsnap = mock.Mock()
        mock_cgsnap.status = 'deleted'
        mock_context = mock.Mock()
        mock_snap = mock.Mock()
        model_update, snapshots = self.driver.delete_cgsnapshot(mock_context,
                                                                mock_cgsnap,
                                                                [mock_snap])
        self.array.destroy_pgroup.assert_called_with(snap_name)
        self.assertFalse(self.array.eradicate_pgroup.called)
        self.assertIsNone(model_update)
        self.assertIsNone(snapshots)
        # Benign: the pgroup snapshot was already destroyed.
        self.array.destroy_pgroup.side_effect = \
            self.purestorage_module.PureHTTPError(
                code=http_client.BAD_REQUEST,
                text="Protection group snapshot has been destroyed."
            )
        self.driver.delete_cgsnapshot(mock_context, mock_cgsnap, [mock_snap])
        self.array.destroy_pgroup.assert_called_with(snap_name)
        self.assertFalse(self.array.eradicate_pgroup.called)
        # Benign: the pgroup snapshot never existed.
        self.array.destroy_pgroup.side_effect = \
            self.purestorage_module.PureHTTPError(
                code=http_client.BAD_REQUEST,
                text="Protection group snapshot does not exist"
            )
        self.driver.delete_cgsnapshot(mock_context, mock_cgsnap, [mock_snap])
        self.array.destroy_pgroup.assert_called_with(snap_name)
        self.assertFalse(self.array.eradicate_pgroup.called)
        # Any other 400 must propagate.
        self.array.destroy_pgroup.side_effect = \
            self.purestorage_module.PureHTTPError(
                code=http_client.BAD_REQUEST,
                text="Some other error"
            )
        self.assertRaises(self.purestorage_module.PureHTTPError,
                          self.driver.delete_cgsnapshot,
                          mock_context,
                          mock_cgsnap,
                          [mock_snap])
        # Non-400 errors must propagate as well.
        self.array.destroy_pgroup.side_effect = \
            self.purestorage_module.PureHTTPError(
                code=http_client.INTERNAL_SERVER_ERROR,
                text="Another different error"
            )
        self.assertRaises(self.purestorage_module.PureHTTPError,
                          self.driver.delete_cgsnapshot,
                          mock_context,
                          mock_cgsnap,
                          [mock_snap])
        self.array.destroy_pgroup.side_effect = None
        self.assert_error_propagates(
            [self.array.destroy_pgroup],
            self.driver.delete_cgsnapshot,
            mock_context,
            mock_cgsnap,
            [mock_snap]
        )
    @mock.patch(BASE_DRIVER_OBJ + "._get_pgroup_snap_name",
                spec=pure.PureBaseVolumeDriver._get_pgroup_snap_name)
    def test_delete_cgsnapshot_eradicate_now(self, mock_get_snap_name):
        """With pure_eradicate_on_delete set, the snap is also eradicated."""
        snap_name = "consisgroup-4a2f7e3a-312a-40c5-96a8-536b8a0f" \
                    "e074-cinder.4a2f7e3a-312a-40c5-96a8-536b8a0fe075"
        mock_get_snap_name.return_value = snap_name
        self.mock_config.pure_eradicate_on_delete = True
        model_update, snapshots = self.driver.delete_cgsnapshot(mock.Mock(),
                                                                mock.Mock(),
                                                                [mock.Mock()])
        self.array.destroy_pgroup.assert_called_once_with(snap_name)
        self.array.eradicate_pgroup.assert_called_once_with(snap_name)
def test_manage_existing(self):
ref_name = 'vol1'
volume_ref = {'name': ref_name}
self.array.list_volume_private_connections.return_value = []
vol_name = VOLUME['name'] + '-cinder'
self.driver.manage_existing(VOLUME, volume_ref)
self.array.list_volume_private_connections.assert_called_with(ref_name)
self.array.rename_volume.assert_called_with(ref_name, vol_name)
    def test_manage_existing_error_propagates(self):
        """Array errors during manage_existing reach the caller."""
        self.array.list_volume_private_connections.return_value = []
        self.assert_error_propagates(
            [self.array.list_volume_private_connections,
             self.array.rename_volume],
            self.driver.manage_existing,
            VOLUME, {'name': 'vol1'}
        )
    def test_manage_existing_bad_ref(self):
        """Invalid existing-ref dicts and missing volumes are rejected."""
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing,
                          VOLUME, {'bad_key': 'bad_value'})
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing,
                          VOLUME, {'name': ''})
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing,
                          VOLUME, {'name': None})
        # A name referring to a nonexistent Purity volume is also invalid.
        self.array.get_volume.side_effect = \
            self.purestorage_module.PureHTTPError(
                text="Volume does not exist.",
                code=http_client.BAD_REQUEST
            )
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing,
                          VOLUME, {'name': 'non-existing-volume'})
    def test_manage_existing_with_connected_hosts(self):
        """A volume with existing host connections cannot be managed."""
        ref_name = 'vol1'
        self.array.list_volume_private_connections.return_value = \
            ["host1", "host2"]
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing,
                          VOLUME, {'name': ref_name})
        self.array.list_volume_private_connections.assert_called_with(ref_name)
        # Must bail out before any rename happens.
        self.assertFalse(self.array.rename_volume.called)
def test_manage_existing_get_size(self):
ref_name = 'vol1'
volume_ref = {'name': ref_name}
expected_size = 5
self.array.get_volume.return_value = {"size": 5368709120}
size = self.driver.manage_existing_get_size(VOLUME, volume_ref)
self.assertEqual(expected_size, size)
self.array.get_volume.assert_called_with(ref_name, snap=False)
    def test_manage_existing_get_size_error_propagates(self):
        """Array errors during size lookup reach the caller."""
        self.array.get_volume.return_value = mock.MagicMock()
        self.assert_error_propagates([self.array.get_volume],
                                     self.driver.manage_existing_get_size,
                                     VOLUME, {'name': 'vol1'})
def test_manage_existing_get_size_bad_ref(self):
self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing_get_size,
VOLUME, {'bad_key': 'bad_value'})
self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing_get_size,
VOLUME, {'name': ''})
self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing_get_size,
VOLUME, {'name': None})
def test_unmanage(self):
vol_name = VOLUME['name'] + "-cinder"
unmanaged_vol_name = vol_name + "-unmanaged"
self.driver.unmanage(VOLUME)
self.array.rename_volume.assert_called_with(vol_name,
unmanaged_vol_name)
    def test_unmanage_error_propagates(self):
        """Array errors during unmanage reach the caller."""
        self.assert_error_propagates([self.array.rename_volume],
                                     self.driver.unmanage,
                                     VOLUME)
    def test_unmanage_with_deleted_volume(self):
        """Unmanage tolerates the volume already being gone on the array."""
        vol_name = VOLUME['name'] + "-cinder"
        unmanaged_vol_name = vol_name + "-unmanaged"
        self.array.rename_volume.side_effect = \
            self.purestorage_module.PureHTTPError(
                text="Volume does not exist.",
                code=http_client.BAD_REQUEST
            )
        self.driver.unmanage(VOLUME)
        self.array.rename_volume.assert_called_with(vol_name,
                                                    unmanaged_vol_name)
    def test_manage_existing_snapshot(self):
        """Managing a snapshot renames it to the cinder snapshot name."""
        ref_name = PURE_SNAPSHOT['name']
        snap_ref = {'name': ref_name}
        self.array.get_volume.return_value = [PURE_SNAPSHOT]
        self.driver.manage_existing_snapshot(SNAPSHOT, snap_ref)
        self.array.rename_volume.assert_called_once_with(ref_name,
                                                         SNAPSHOT_PURITY_NAME)
        self.array.get_volume.assert_called_with(PURE_SNAPSHOT['source'],
                                                 snap=True)
def test_manage_existing_snapshot_multiple_snaps_on_volume(self):
ref_name = PURE_SNAPSHOT['name']
snap_ref = {'name': ref_name}
pure_snaps = [PURE_SNAPSHOT]
for i in range(5):
snap = PURE_SNAPSHOT.copy()
snap['name'] += str(i)
pure_snaps.append(snap)
self.array.get_volume.return_value = pure_snaps
self.driver.manage_existing_snapshot(SNAPSHOT, snap_ref)
self.array.rename_volume.assert_called_once_with(ref_name,
SNAPSHOT_PURITY_NAME)
    def test_manage_existing_snapshot_error_propagates(self):
        """Array errors during snapshot manage reach the caller."""
        self.array.get_volume.return_value = [PURE_SNAPSHOT]
        self.assert_error_propagates(
            [self.array.rename_volume],
            self.driver.manage_existing_snapshot,
            SNAPSHOT, {'name': PURE_SNAPSHOT['name']}
        )
    def test_manage_existing_snapshot_bad_ref(self):
        """A ref without a 'name' key is rejected."""
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_snapshot,
                          SNAPSHOT, {'bad_key': 'bad_value'})
    def test_manage_existing_snapshot_empty_ref(self):
        """An empty-string name ref is rejected."""
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_snapshot,
                          SNAPSHOT, {'name': ''})
    def test_manage_existing_snapshot_none_ref(self):
        """A None name ref is rejected."""
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_snapshot,
                          SNAPSHOT, {'name': None})
    def test_manage_existing_snapshot_volume_ref_not_exist(self):
        """A ref whose source volume is missing on Purity is rejected."""
        self.array.get_volume.side_effect = \
            self.purestorage_module.PureHTTPError(
                text="Volume does not exist.",
                code=http_client.BAD_REQUEST
            )
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_snapshot,
                          SNAPSHOT, {'name': 'non-existing-volume.snap1'})
    def test_manage_existing_snapshot_ref_not_exist(self):
        """A ref naming a snapshot absent from the volume list is rejected."""
        ref_name = PURE_SNAPSHOT['name'] + '-fake'
        snap_ref = {'name': ref_name}
        self.array.get_volume.return_value = [PURE_SNAPSHOT]
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_snapshot,
                          SNAPSHOT, snap_ref)
    def test_manage_existing_snapshot_bad_api_version(self):
        """Snapshot manage requires a newer Purity REST API than 1.3."""
        self.array.get_rest_version.return_value = '1.3'
        self.assertRaises(exception.PureDriverException,
                          self.driver.manage_existing_snapshot,
                          SNAPSHOT, {'name': PURE_SNAPSHOT['name']})
    def test_manage_existing_snapshot_get_size(self):
        """Snapshot size is derived from the Purity snapshot listing."""
        ref_name = PURE_SNAPSHOT['name']
        snap_ref = {'name': ref_name}
        self.array.get_volume.return_value = [PURE_SNAPSHOT]
        size = self.driver.manage_existing_snapshot_get_size(SNAPSHOT,
                                                             snap_ref)
        expected_size = 3.0
        self.assertEqual(expected_size, size)
        self.array.get_volume.assert_called_with(PURE_SNAPSHOT['source'],
                                                 snap=True)
    def test_manage_existing_snapshot_get_size_error_propagates(self):
        """Array errors during snapshot size lookup reach the caller."""
        self.array.get_volume.return_value = [PURE_SNAPSHOT]
        self.assert_error_propagates(
            [self.array.get_volume],
            self.driver.manage_existing_snapshot_get_size,
            SNAPSHOT, {'name': PURE_SNAPSHOT['name']}
        )
    def test_manage_existing_snapshot_get_size_bad_ref(self):
        """A ref without a 'name' key is rejected for size lookup."""
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_snapshot_get_size,
                          SNAPSHOT, {'bad_key': 'bad_value'})
    def test_manage_existing_snapshot_get_size_empty_ref(self):
        """An empty-string name ref is rejected for size lookup."""
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_snapshot_get_size,
                          SNAPSHOT, {'name': ''})
    def test_manage_existing_snapshot_get_size_none_ref(self):
        """A None name ref is rejected for size lookup."""
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_snapshot_get_size,
                          SNAPSHOT, {'name': None})
    def test_manage_existing_snapshot_get_size_volume_ref_not_exist(self):
        """Size lookup for a snapshot of a missing volume is rejected."""
        self.array.get_volume.side_effect = \
            self.purestorage_module.PureHTTPError(
                text="Volume does not exist.",
                code=http_client.BAD_REQUEST
            )
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_snapshot_get_size,
                          SNAPSHOT, {'name': 'non-existing-volume.snap1'})
    def test_manage_existing_snapshot_get_size_bad_api_version(self):
        """Snapshot size lookup requires a newer Purity REST API than 1.3."""
        self.array.get_rest_version.return_value = '1.3'
        self.assertRaises(exception.PureDriverException,
                          self.driver.manage_existing_snapshot_get_size,
                          SNAPSHOT, {'name': PURE_SNAPSHOT['name']})
def test_unmanage_snapshot(self):
unmanaged_snap_name = SNAPSHOT_PURITY_NAME + "-unmanaged"
self.driver.unmanage_snapshot(SNAPSHOT)
self.array.rename_volume.assert_called_with(SNAPSHOT_PURITY_NAME,
unmanaged_snap_name)
    def test_unmanage_snapshot_error_propagates(self):
        """Array errors during snapshot unmanage reach the caller."""
        self.assert_error_propagates([self.array.rename_volume],
                                     self.driver.unmanage_snapshot,
                                     SNAPSHOT)
    def test_unmanage_snapshot_with_deleted_snapshot(self):
        """Unmanage tolerates the snapshot already being gone on the array."""
        unmanaged_snap_name = SNAPSHOT_PURITY_NAME + "-unmanaged"
        self.array.rename_volume.side_effect = \
            self.purestorage_module.PureHTTPError(
                text="Snapshot does not exist.",
                code=http_client.BAD_REQUEST
            )
        self.driver.unmanage_snapshot(SNAPSHOT)
        self.array.rename_volume.assert_called_with(SNAPSHOT_PURITY_NAME,
                                                    unmanaged_snap_name)
    def test_unmanage_snapshot_bad_api_version(self):
        """Snapshot unmanage requires a newer Purity REST API than 1.3."""
        self.array.get_rest_version.return_value = '1.3'
        self.assertRaises(exception.PureDriverException,
                          self.driver.unmanage_snapshot,
                          SNAPSHOT)
    def _test_retype_repl(self, mock_is_repl, is_vol_repl,
                          repl_cabability, volume_id=None):
        """Helper: retype a volume toward the given replication capability.

        Asserts retype reports success and returns (context, volume) so
        callers can make further assertions.
        NOTE(review): 'repl_cabability' is a typo for 'repl_capability';
        kept as-is in case out-of-view callers pass it by keyword.
        """
        mock_is_repl.return_value = is_vol_repl
        context = mock.MagicMock()
        volume = fake_volume.fake_volume_obj(context)
        if volume_id:
            volume.id = volume_id
        new_type = {
            'extra_specs': {
                pure.EXTRA_SPECS_REPL_ENABLED:
                '<is> ' + str(repl_cabability)
            }
        }
        actual = self.driver.retype(context, volume, new_type, None, None)
        expected = (True, None)
        self.assertEqual(expected, actual)
        return context, volume
def _test_get_manageable_things(self,
pure_objs=MANAGEABLE_PURE_VOLS,
expected_refs=MANAGEABLE_PURE_VOL_REFS,
pure_hosts=list(),
cinder_objs=list(),
is_snapshot=False):
self.array.list_volumes.return_value = pure_objs
self.array.list_hosts.return_value = pure_hosts
marker = mock.Mock()
limit = mock.Mock()
offset = mock.Mock()
sort_keys = mock.Mock()
sort_dirs = mock.Mock()
with mock.patch('cinder.volume.utils.paginate_entries_list') as mpage:
if is_snapshot:
test_func = self.driver.get_manageable_snapshots
else:
test_func = self.driver.get_manageable_volumes
test_func(cinder_objs, marker, limit, offset, sort_keys, sort_dirs)
mpage.assert_called_once_with(
expected_refs,
marker,
limit,
offset,
sort_keys,
sort_dirs
)
def test_get_manageable_volumes(self,):
"""Default success case.
Given a list of pure volumes from the REST API, give back a list
of volume references.
"""
self._test_get_manageable_things(pure_hosts=[PURE_HOST])
    def test_get_manageable_volumes_connected_vol(self):
        """Make sure volumes connected to hosts are flagged as unsafe."""
        connected_host = deepcopy(PURE_HOST)
        connected_host['name'] = 'host2'
        connected_host['vol'] = MANAGEABLE_PURE_VOLS[0]['name']
        pure_hosts = [PURE_HOST, connected_host]
        expected_refs = deepcopy(MANAGEABLE_PURE_VOL_REFS)
        expected_refs[0]['safe_to_manage'] = False
        expected_refs[0]['reason_not_safe'] = 'Volume connected to host host2.'
        self._test_get_manageable_things(expected_refs=expected_refs,
                                         pure_hosts=pure_hosts)
    def test_get_manageable_volumes_already_managed(self):
        """Make sure volumes already owned by cinder are flagged as unsafe."""
        cinder_vol = fake_volume.fake_volume_obj(mock.MagicMock())
        cinder_vol.id = VOLUME_ID
        cinders_vols = [cinder_vol]
        # Have one of our vol names match up with the existing cinder volume
        purity_vols = deepcopy(MANAGEABLE_PURE_VOLS)
        purity_vols[0]['name'] = 'volume-' + VOLUME_ID + '-cinder'
        expected_refs = deepcopy(MANAGEABLE_PURE_VOL_REFS)
        expected_refs[0]['reference'] = {'name': purity_vols[0]['name']}
        expected_refs[0]['safe_to_manage'] = False
        expected_refs[0]['reason_not_safe'] = 'Volume already managed.'
        expected_refs[0]['cinder_id'] = VOLUME_ID
        self._test_get_manageable_things(pure_objs=purity_vols,
                                         expected_refs=expected_refs,
                                         pure_hosts=[PURE_HOST],
                                         cinder_objs=cinders_vols)
    def test_get_manageable_volumes_no_pure_volumes(self):
        """Expect no refs to be found if no volumes are on Purity."""
        self._test_get_manageable_things(pure_objs=[],
                                         expected_refs=[],
                                         pure_hosts=[PURE_HOST])
    def test_get_manageable_volumes_no_hosts(self):
        """Success case with no hosts on Purity."""
        self._test_get_manageable_things(pure_hosts=[])
    def test_get_manageable_snapshots(self):
        """Default success case.

        Given a list of pure snapshots from the REST API, give back a list
        of snapshot references.
        """
        self._test_get_manageable_things(
            pure_objs=MANAGEABLE_PURE_SNAPS,
            expected_refs=MANAGEABLE_PURE_SNAP_REFS,
            pure_hosts=[PURE_HOST],
            is_snapshot=True
        )
    def test_get_manageable_snapshots_already_managed(self):
        """Make sure snaps already owned by cinder are flagged as unsafe."""
        cinder_vol = fake_volume.fake_volume_obj(mock.MagicMock())
        cinder_vol.id = VOLUME_ID
        cinder_snap = fake_snapshot.fake_snapshot_obj(mock.MagicMock())
        cinder_snap.id = SNAPSHOT_ID
        cinder_snap.volume = cinder_vol
        cinder_snaps = [cinder_snap]
        # Have one Purity snap name match the existing cinder snapshot.
        purity_snaps = deepcopy(MANAGEABLE_PURE_SNAPS)
        purity_snaps[0]['name'] = 'volume-%s-cinder.snapshot-%s' % (
            VOLUME_ID, SNAPSHOT_ID
        )
        expected_refs = deepcopy(MANAGEABLE_PURE_SNAP_REFS)
        expected_refs[0]['reference'] = {'name': purity_snaps[0]['name']}
        expected_refs[0]['safe_to_manage'] = False
        expected_refs[0]['reason_not_safe'] = 'Snapshot already managed.'
        expected_refs[0]['cinder_id'] = SNAPSHOT_ID
        self._test_get_manageable_things(
            pure_objs=purity_snaps,
            expected_refs=expected_refs,
            cinder_objs=cinder_snaps,
            pure_hosts=[PURE_HOST],
            is_snapshot=True
        )
    def test_get_manageable_snapshots_no_pure_snapshots(self):
        """Expect no refs to be found if no snapshots are on Purity."""
        self._test_get_manageable_things(pure_objs=[],
                                         expected_refs=[],
                                         pure_hosts=[PURE_HOST],
                                         is_snapshot=True)
    @mock.patch(BASE_DRIVER_OBJ + '._is_volume_replicated_type', autospec=True)
    def test_retype_repl_to_repl(self, mock_is_replicated_type):
        """Retype repl -> repl succeeds without pgroup changes."""
        self._test_retype_repl(mock_is_replicated_type, True, True)
    @mock.patch(BASE_DRIVER_OBJ + '._is_volume_replicated_type', autospec=True)
    def test_retype_non_repl_to_non_repl(self, mock_is_replicated_type):
        """Retype non-repl -> non-repl succeeds without pgroup changes."""
        self._test_retype_repl(mock_is_replicated_type, False, False)
    @mock.patch(BASE_DRIVER_OBJ + '._is_volume_replicated_type', autospec=True)
    def test_retype_non_repl_to_repl(self, mock_is_replicated_type):
        """Retype non-repl -> repl adds the volume to the repl pgroup."""
        context, volume = self._test_retype_repl(mock_is_replicated_type,
                                                 False,
                                                 True,
                                                 volume_id=VOLUME_ID)
        self.array.set_pgroup.assert_called_once_with(
            pure.REPLICATION_CG_NAME,
            addvollist=[VOLUME_PURITY_NAME]
        )
@mock.patch(BASE_DRIVER_OBJ + '._is_volume_replicated_type', autospec=True)
def test_retype_repl_to_non_repl(self, mock_is_replicated_type,):
context, volume = self._test_retype_repl(mock_is_replicated_type,
True,
False,
volume_id=VOLUME_ID)
self.array.set_pgroup.assert_called_once_with(
pure.REPLICATION_CG_NAME,
remvollist=[VOLUME_PURITY_NAME]
)
    @mock.patch('cinder.volume.volume_types.get_volume_type')
    def test_is_vol_replicated_no_extra_specs(self, mock_get_vol_type):
        """A type without extra specs is not replicated."""
        mock_get_vol_type.return_value = NON_REPLICATED_VOL_TYPE
        volume = fake_volume.fake_volume_obj(mock.MagicMock())
        actual = self.driver._is_volume_replicated_type(volume)
        self.assertFalse(actual)
    @mock.patch('cinder.volume.volume_types.get_volume_type')
    def test_is_vol_replicated_has_repl_extra_specs(self, mock_get_vol_type):
        """A type carrying the replication extra spec is replicated."""
        mock_get_vol_type.return_value = REPLICATED_VOL_TYPE
        volume = fake_volume.fake_volume_obj(mock.MagicMock())
        volume.volume_type_id = REPLICATED_VOL_TYPE['id']
        actual = self.driver._is_volume_replicated_type(volume)
        self.assertTrue(actual)
    @mock.patch('cinder.volume.volume_types.get_volume_type')
    def test_is_vol_replicated_none_type(self, mock_get_vol_type):
        """A volume with no volume type at all is not replicated."""
        mock_get_vol_type.side_effect = exception.InvalidVolumeType(reason='')
        volume = fake_volume.fake_volume_obj(mock.MagicMock())
        volume.volume_type = None
        volume.volume_type_id = None
        actual = self.driver._is_volume_replicated_type(volume)
        self.assertFalse(actual)
@mock.patch('cinder.volume.volume_types.get_volume_type')
def test_is_vol_replicated_has_other_extra_specs(self, mock_get_vol_type):
vtype_test = deepcopy(NON_REPLICATED_VOL_TYPE)
vtype_test["extra_specs"] = {"some_key": "some_value"}
mock_get_vol_type.return_value = vtype_test
volume = fake_volume.fake_volume_obj(mock.MagicMock())
actual = self.driver._is_volume_replicated_type(volume)
self.assertFalse(actual)
    def test_does_pgroup_exist_not_exists(self):
        """A 'does not exist' 400 response means the pgroup is absent."""
        self.array.get_pgroup.side_effect = (
            self.purestorage_module.PureHTTPError(code=http_client.BAD_REQUEST,
                                                  text="does not exist"))
        exists = self.driver._does_pgroup_exist(self.array, "some_pgroup")
        self.assertFalse(exists)
    def test_does_pgroup_exist_exists(self):
        """A successful get_pgroup response means the pgroup exists."""
        self.array.get_pgroup.side_effect = None
        self.array.get_pgroup.return_value = PGROUP_ON_TARGET_NOT_ALLOWED
        exists = self.driver._does_pgroup_exist(self.array, "some_pgroup")
        self.assertTrue(exists)
    def test_does_pgroup_exist_error_propagates(self):
        """Unexpected array errors during the existence check propagate."""
        self.assert_error_propagates([self.array.get_pgroup],
                                     self.driver._does_pgroup_exist,
                                     self.array,
                                     "some_pgroup")
    @mock.patch(BASE_DRIVER_OBJ + "._does_pgroup_exist")
    def test_wait_until_target_group_setting_propagates_ready(self,
                                                              mock_exists):
        """No error when the pgroup is already visible on the target."""
        mock_exists.return_value = True
        self.driver._wait_until_target_group_setting_propagates(
            self.array,
            "some_pgroup"
        )
    @mock.patch(BASE_DRIVER_OBJ + "._does_pgroup_exist")
    def test_wait_until_target_group_setting_propagates_not_ready(self,
                                                                  mock_exists):
        """A pgroup that never appears on the target raises."""
        mock_exists.return_value = False
        self.assertRaises(
            exception.PureDriverException,
            self.driver._wait_until_target_group_setting_propagates,
            self.array,
            "some_pgroup"
        )
    def test_wait_until_source_array_allowed_ready(self):
        """No error when the target already allows the source array."""
        self.array.get_pgroup.return_value = PGROUP_ON_TARGET_ALLOWED
        self.driver._wait_until_source_array_allowed(
            self.array,
            "some_pgroup",)
    def test_wait_until_source_array_allowed_not_ready(self):
        """A target that never allows the source array raises."""
        self.array.get_pgroup.return_value = PGROUP_ON_TARGET_NOT_ALLOWED
        self.assertRaises(
            exception.PureDriverException,
            self.driver._wait_until_source_array_allowed,
            self.array,
            "some_pgroup",
        )
    @mock.patch('cinder.volume.volume_types.get_volume_type')
    def test_create_volume_replicated(self, mock_get_volume_type):
        """Creating a replicated volume also adds it to the repl pgroup."""
        mock_get_volume_type.return_value = REPLICATED_VOL_TYPE
        self._setup_mocks_for_replication()
        self.driver._array = self.array
        self.driver._array.array_name = GET_ARRAY_PRIMARY["array_name"]
        self.driver._array.array_id = GET_ARRAY_PRIMARY["id"]
        self.driver._replication_target_arrays = [mock.Mock()]
        self.driver._replication_target_arrays[0].array_name = (
            GET_ARRAY_SECONDARY["array_name"])
        self.driver.create_volume(VOLUME)
        self.array.create_volume.assert_called_with(
            VOLUME["name"] + "-cinder", 2 * units.Gi)
        self.array.set_pgroup.assert_called_with(
            REPLICATION_PROTECTION_GROUP,
            addvollist=[VOLUME["name"] + "-cinder"])
    def test_find_failover_target_no_repl_targets(self):
        """Failover with no configured replication targets raises."""
        self.driver._replication_target_arrays = []
        self.assertRaises(exception.PureDriverException,
                          self.driver._find_failover_target,
                          None)
    @mock.patch(BASE_DRIVER_OBJ + '._get_latest_replicated_pg_snap')
    def test_find_failover_target_secondary_specified(self, mock_get_snap):
        """The requested secondary backend is selected with its pg snap."""
        mock_backend_1 = mock.Mock()
        mock_backend_2 = mock.Mock()
        secondary_id = 'foo'
        mock_backend_2._backend_id = secondary_id
        self.driver._replication_target_arrays = [mock_backend_1,
                                                  mock_backend_2]
        mock_get_snap.return_value = REPLICATED_PGSNAPS[0]
        array, pg_snap = self.driver._find_failover_target(secondary_id)
        self.assertEqual(mock_backend_2, array)
        self.assertEqual(REPLICATED_PGSNAPS[0], pg_snap)
    def test_find_failover_target_secondary_specified_not_found(self):
        """An unknown secondary id raises InvalidReplicationTarget."""
        mock_backend = mock.Mock()
        mock_backend._backend_id = 'not_foo'
        self.driver._replication_target_arrays = [mock_backend]
        self.assertRaises(exception.InvalidReplicationTarget,
                          self.driver._find_failover_target,
                          'foo')
    @mock.patch(BASE_DRIVER_OBJ + '._get_latest_replicated_pg_snap')
    def test_find_failover_target_secondary_specified_no_pgsnap(self,
                                                                mock_get_snap):
        """A known secondary without a replicated pg snapshot raises."""
        mock_backend = mock.Mock()
        secondary_id = 'foo'
        mock_backend._backend_id = secondary_id
        self.driver._replication_target_arrays = [mock_backend]
        mock_get_snap.return_value = None
        self.assertRaises(exception.PureDriverException,
                          self.driver._find_failover_target,
                          secondary_id)
    @mock.patch(BASE_DRIVER_OBJ + '._get_latest_replicated_pg_snap')
    def test_find_failover_target_no_secondary_specified(self,
                                                         mock_get_snap):
        """Without a secondary id, the first target with a snap is chosen."""
        mock_backend_1 = mock.Mock()
        mock_backend_2 = mock.Mock()
        self.driver._replication_target_arrays = [mock_backend_1,
                                                  mock_backend_2]
        mock_get_snap.return_value = REPLICATED_PGSNAPS[0]
        array, pg_snap = self.driver._find_failover_target(None)
        self.assertEqual(mock_backend_1, array)
        self.assertEqual(REPLICATED_PGSNAPS[0], pg_snap)
    @mock.patch(BASE_DRIVER_OBJ + '._get_latest_replicated_pg_snap')
    def test_find_failover_target_no_secondary_specified_missing_pgsnap(
            self, mock_get_snap):
        """Targets without a pg snap are skipped in favor of one that has."""
        mock_backend_1 = mock.Mock()
        mock_backend_2 = mock.Mock()
        self.driver._replication_target_arrays = [mock_backend_1,
                                                  mock_backend_2]
        mock_get_snap.side_effect = [None, REPLICATED_PGSNAPS[0]]
        array, pg_snap = self.driver._find_failover_target(None)
        self.assertEqual(mock_backend_2, array)
        self.assertEqual(REPLICATED_PGSNAPS[0], pg_snap)
    @mock.patch(BASE_DRIVER_OBJ + '._get_latest_replicated_pg_snap')
    def test_find_failover_target_no_secondary_specified_no_pgsnap(
            self, mock_get_snap):
        """No target has a replicated pgroup snapshot ->
        PureDriverException."""
        mock_backend = mock.Mock()
        self.driver._replication_target_arrays = [mock_backend]
        mock_get_snap.return_value = None
        self.assertRaises(exception.PureDriverException,
                          self.driver._find_failover_target,
                          None)
    @mock.patch(BASE_DRIVER_OBJ + '._get_latest_replicated_pg_snap')
    def test_find_failover_target_error_propagates_secondary_specified(
            self, mock_get_snap):
        """Errors raised during pg-snap lookup must bubble up when a
        secondary is specified."""
        mock_backend = mock.Mock()
        mock_backend._backend_id = 'foo'
        self.driver._replication_target_arrays = [mock_backend]
        self.assert_error_propagates(
            [mock_get_snap],
            self.driver._find_failover_target,
            'foo'
        )
    @mock.patch(BASE_DRIVER_OBJ + '._get_latest_replicated_pg_snap')
    def test_find_failover_target_error_propagates_no_secondary(
            self, mock_get_snap):
        """Errors raised during pg-snap lookup must bubble up when no
        secondary is specified."""
        self.driver._replication_target_arrays = [mock.Mock()]
        self.assert_error_propagates(
            [mock_get_snap],
            self.driver._find_failover_target,
            None
        )
    @mock.patch('cinder.volume.volume_types.get_volume_type')
    def test_enable_replication_if_needed_success(
            self, mock_get_volume_type):
        """A replicated volume type results in the volume being added to
        the replication pgroup."""
        mock_get_volume_type.return_value = REPLICATED_VOL_TYPE
        self.driver._enable_replication_if_needed(self.array, VOLUME)
        self.array.set_pgroup.assert_called_with(
            self.driver._replication_pg_name,
            addvollist=[VOLUME_PURITY_NAME]
        )
    @mock.patch('cinder.volume.volume_types.get_volume_type')
    def test_enable_replication_if_needed_not_repl_type(
            self, mock_get_volume_type):
        """A non-replicated volume type must leave the pgroup untouched."""
        mock_get_volume_type.return_value = NON_REPLICATED_VOL_TYPE
        self.driver._enable_replication_if_needed(self.array, VOLUME)
        self.assertFalse(self.array.set_pgroup.called)
    @mock.patch('cinder.volume.volume_types.get_volume_type')
    def test_enable_replication_if_needed_already_repl(
            self, mock_get_volume_type):
        """An 'already belongs to' error from the array is tolerated (the
        volume is already in the pgroup)."""
        mock_get_volume_type.return_value = REPLICATED_VOL_TYPE
        self.array.set_pgroup.side_effect = FakePureStorageHTTPError(
            code=http_client.BAD_REQUEST, text='already belongs to')
        self.driver._enable_replication_if_needed(self.array, VOLUME)
        self.array.set_pgroup.assert_called_with(
            self.driver._replication_pg_name,
            addvollist=[VOLUME_PURITY_NAME]
        )
    @mock.patch('cinder.volume.volume_types.get_volume_type')
    def test_enable_replication_if_needed_error_propagates(
            self, mock_get_volume_type):
        """Unexpected set_pgroup errors must propagate to the caller."""
        mock_get_volume_type.return_value = REPLICATED_VOL_TYPE
        self.driver._enable_replication_if_needed(self.array, VOLUME)
        # NOTE(review): error propagation is asserted against
        # _enable_replication rather than _enable_replication_if_needed
        # (the method named in this test) — confirm this is intentional.
        self.assert_error_propagates(
            [self.array.set_pgroup],
            self.driver._enable_replication,
            self.array, VOLUME
        )
    @mock.patch(BASE_DRIVER_OBJ + '._get_flasharray')
    @mock.patch(BASE_DRIVER_OBJ + '._find_failover_target')
    def test_failover(self, mock_find_failover_target, mock_get_array):
        """Successful failover: the active backend switches to the
        secondary and each replicated volume is restored by copying it out
        of its pgroup snapshot."""
        secondary_device_id = 'foo'
        self.array2._backend_id = secondary_device_id
        self.driver._replication_target_arrays = [self.array2]
        # A separate mock models the REST-v1.3 handle returned by
        # _get_flasharray for the secondary array.
        array2_v1_3 = mock.Mock()
        array2_v1_3._backend_id = secondary_device_id
        array2_v1_3.array_name = GET_ARRAY_SECONDARY['array_name']
        array2_v1_3.array_id = GET_ARRAY_SECONDARY['id']
        array2_v1_3.version = '1.3'
        mock_get_array.return_value = array2_v1_3
        target_array = self.array2
        target_array.copy_volume = mock.Mock()
        mock_find_failover_target.return_value = (
            target_array,
            REPLICATED_PGSNAPS[1]
        )
        array2_v1_3.get_volume.return_value = REPLICATED_VOLUME_SNAPS
        context = mock.MagicMock()
        new_active_id, volume_updates = self.driver.failover_host(
            context,
            REPLICATED_VOLUME_OBJS,
            None
        )
        self.assertEqual(secondary_device_id, new_active_id)
        self.assertEqual([], volume_updates)
        # Every pgroup snapshot must be copied over its target volume
        # (volume name is the suffix after the last '.').
        calls = []
        for snap in REPLICATED_VOLUME_SNAPS:
            vol_name = snap['name'].split('.')[-1]
            calls.append(mock.call(
                snap['name'],
                vol_name,
                overwrite=True
            ))
        target_array.copy_volume.assert_has_calls(calls, any_order=True)
    @mock.patch(BASE_DRIVER_OBJ + '._get_flasharray')
    @mock.patch(BASE_DRIVER_OBJ + '._find_failover_target')
    def test_failover_error_propagates(self, mock_find_failover_target,
                                       mock_get_array):
        """Any error raised along the failover path must reach the
        caller."""
        mock_find_failover_target.return_value = (
            self.array2,
            REPLICATED_PGSNAPS[1]
        )
        array2_v1_3 = mock.Mock()
        array2_v1_3.array_name = GET_ARRAY_SECONDARY['array_name']
        array2_v1_3.array_id = GET_ARRAY_SECONDARY['id']
        array2_v1_3.version = '1.3'
        mock_get_array.return_value = array2_v1_3
        array2_v1_3.get_volume.return_value = REPLICATED_VOLUME_SNAPS
        self.assert_error_propagates(
            [mock_find_failover_target,
             mock_get_array,
             array2_v1_3.get_volume,
             self.array2.copy_volume],
            self.driver.failover_host,
            mock.Mock(), REPLICATED_VOLUME_OBJS, None
        )
    def test_disable_replication_success(self):
        """Disabling replication removes the volume from the pgroup."""
        self.driver._disable_replication(VOLUME)
        self.array.set_pgroup.assert_called_with(
            self.driver._replication_pg_name,
            remvollist=[VOLUME_PURITY_NAME]
        )
    def test_disable_replication_error_propagates(self):
        """set_pgroup failures must propagate out of
        _disable_replication."""
        self.assert_error_propagates(
            [self.array.set_pgroup],
            self.driver._disable_replication,
            VOLUME
        )
    def test_disable_replication_already_disabled(self):
        """A 'could not be found' error is tolerated — replication was
        already disabled for the volume."""
        self.array.set_pgroup.side_effect = FakePureStorageHTTPError(
            code=http_client.BAD_REQUEST, text='could not be found')
        self.driver._disable_replication(VOLUME)
        self.array.set_pgroup.assert_called_with(
            self.driver._replication_pg_name,
            remvollist=[VOLUME_PURITY_NAME]
        )
    def test_get_flasharray_verify_https(self):
        """HTTPS verification options must be forwarded verbatim to the
        FlashArray client constructor."""
        san_ip = '1.2.3.4'
        api_token = 'abcdef'
        cert_path = '/my/ssl/certs'
        self.purestorage_module.FlashArray.return_value = mock.MagicMock()
        self.driver._get_flasharray(san_ip,
                                    api_token,
                                    verify_https=True,
                                    ssl_cert_path=cert_path)
        self.purestorage_module.FlashArray.assert_called_with(
            san_ip,
            api_token=api_token,
            rest_version=None,
            verify_https=True,
            ssl_cert=cert_path,
            user_agent=self.driver._user_agent,
        )
class PureISCSIDriverTestCase(PureDriverTestCase):
    """Tests for the iSCSI flavor of the Pure Storage driver: host lookup
    by IQN, initialize_connection (with/without CHAP and multipath), target
    port discovery, and the _connect host-creation/retry paths."""
    def setUp(self):
        super(PureISCSIDriverTestCase, self).setUp()
        self.mock_config.use_chap_auth = False
        self.driver = pure.PureISCSIDriver(configuration=self.mock_config)
        self.driver._array = self.array
        self.mock_utils = mock.Mock()
        self.driver.driver_utils = self.mock_utils
    def test_get_host(self):
        """_get_host matches a purity host by the connector's IQN."""
        good_host = PURE_HOST.copy()
        good_host.update(iqn=["another-wrong-iqn", INITIATOR_IQN])
        bad_host = {"name": "bad-host", "iqn": ["wrong-iqn"]}
        self.array.list_hosts.return_value = [bad_host]
        real_result = self.driver._get_host(self.array, ISCSI_CONNECTOR)
        self.assertIsNone(real_result)
        self.array.list_hosts.return_value.append(good_host)
        real_result = self.driver._get_host(self.array, ISCSI_CONNECTOR)
        self.assertEqual(good_host, real_result)
        self.assert_error_propagates([self.array.list_hosts],
                                     self.driver._get_host,
                                     self.array,
                                     ISCSI_CONNECTOR)
    @mock.patch(ISCSI_DRIVER_OBJ + "._connect")
    @mock.patch(ISCSI_DRIVER_OBJ + "._get_target_iscsi_ports")
    def test_initialize_connection(self, mock_get_iscsi_ports,
                                   mock_connection):
        """Basic initialize_connection builds the expected connection
        info dict from the ports and the _connect result."""
        mock_get_iscsi_ports.return_value = ISCSI_PORTS
        lun = 1
        connection = {
            "vol": VOLUME["name"] + "-cinder",
            "lun": lun,
        }
        mock_connection.return_value = connection
        result = deepcopy(ISCSI_CONNECTION_INFO)
        real_result = self.driver.initialize_connection(VOLUME,
                                                        ISCSI_CONNECTOR)
        self.assertDictEqual(result, real_result)
        mock_get_iscsi_ports.assert_called_with()
        mock_connection.assert_called_with(VOLUME, ISCSI_CONNECTOR)
        self.assert_error_propagates([mock_get_iscsi_ports, mock_connection],
                                     self.driver.initialize_connection,
                                     VOLUME, ISCSI_CONNECTOR)
    @mock.patch(ISCSI_DRIVER_OBJ + "._connect")
    @mock.patch(ISCSI_DRIVER_OBJ + "._get_target_iscsi_ports")
    def test_initialize_connection_with_auth(self, mock_get_iscsi_ports,
                                             mock_connection):
        """With use_chap_auth set, CHAP credentials from _connect are
        surfaced in the connection info."""
        auth_type = "CHAP"
        chap_username = ISCSI_CONNECTOR["host"]
        chap_password = "password"
        mock_get_iscsi_ports.return_value = ISCSI_PORTS
        mock_connection.return_value = {
            "vol": VOLUME["name"] + "-cinder",
            "lun": 1,
            "auth_username": chap_username,
            "auth_password": chap_password,
        }
        result = deepcopy(ISCSI_CONNECTION_INFO)
        result["data"]["auth_method"] = auth_type
        result["data"]["auth_username"] = chap_username
        result["data"]["auth_password"] = chap_password
        self.mock_config.use_chap_auth = True
        # Branch where no credentials were generated
        real_result = self.driver.initialize_connection(VOLUME,
                                                        ISCSI_CONNECTOR)
        mock_connection.assert_called_with(VOLUME, ISCSI_CONNECTOR)
        self.assertDictEqual(result, real_result)
        self.assert_error_propagates([mock_get_iscsi_ports, mock_connection],
                                     self.driver.initialize_connection,
                                     VOLUME, ISCSI_CONNECTOR)
    @mock.patch(ISCSI_DRIVER_OBJ + "._connect")
    @mock.patch(ISCSI_DRIVER_OBJ + "._get_target_iscsi_ports")
    def test_initialize_connection_multipath(self,
                                             mock_get_iscsi_ports,
                                             mock_connection):
        """A multipath connector yields the same connection info; the
        non-multipath fallback is exercised afterwards."""
        mock_get_iscsi_ports.return_value = ISCSI_PORTS
        lun = 1
        connection = {
            "vol": VOLUME["name"] + "-cinder",
            "lun": lun,
        }
        mock_connection.return_value = connection
        multipath_connector = deepcopy(ISCSI_CONNECTOR)
        multipath_connector["multipath"] = True
        result = deepcopy(ISCSI_CONNECTION_INFO)
        real_result = self.driver.initialize_connection(VOLUME,
                                                        multipath_connector)
        self.assertDictEqual(result, real_result)
        mock_get_iscsi_ports.assert_called_with()
        mock_connection.assert_called_with(VOLUME, multipath_connector)
        multipath_connector["multipath"] = False
        self.driver.initialize_connection(VOLUME, multipath_connector)
    def test_get_target_iscsi_ports(self):
        """All-iSCSI port list is returned unchanged."""
        self.array.list_ports.return_value = ISCSI_PORTS
        ret = self.driver._get_target_iscsi_ports()
        self.assertEqual(ISCSI_PORTS, ret)
    def test_get_target_iscsi_ports_with_iscsi_and_fc(self):
        """FC ports are filtered out; only iSCSI ports are returned."""
        self.array.list_ports.return_value = PORTS_WITH
        ret = self.driver._get_target_iscsi_ports()
        self.assertEqual(ISCSI_PORTS, ret)
    def test_get_target_iscsi_ports_with_no_ports(self):
        # Should raise an exception if there are no ports
        self.array.list_ports.return_value = []
        self.assertRaises(exception.PureDriverException,
                          self.driver._get_target_iscsi_ports)
    def test_get_target_iscsi_ports_with_only_fc_ports(self):
        # Should raise an exception of there are no iscsi ports
        self.array.list_ports.return_value = PORTS_WITHOUT
        self.assertRaises(exception.PureDriverException,
                          self.driver._get_target_iscsi_ports)
    @mock.patch("cinder.volume.utils.generate_password", autospec=True)
    @mock.patch(ISCSI_DRIVER_OBJ + "._get_host", autospec=True)
    @mock.patch(ISCSI_DRIVER_OBJ + "._generate_purity_host_name", spec=True)
    def test_connect(self, mock_generate, mock_host, mock_gen_secret):
        """_connect: existing host, new host creation, error propagation,
        and both CHAP branches (existing vs. generated credentials)."""
        vol_name = VOLUME["name"] + "-cinder"
        result = {"vol": vol_name, "lun": 1}
        # Branch where host already exists
        mock_host.return_value = PURE_HOST
        self.array.connect_host.return_value = {"vol": vol_name, "lun": 1}
        real_result = self.driver._connect(VOLUME, ISCSI_CONNECTOR)
        self.assertEqual(result, real_result)
        mock_host.assert_called_with(self.driver, self.array, ISCSI_CONNECTOR)
        self.assertFalse(mock_generate.called)
        self.assertFalse(self.array.create_host.called)
        self.array.connect_host.assert_called_with(PURE_HOST_NAME, vol_name)
        # Branch where new host is created
        mock_host.return_value = None
        mock_generate.return_value = PURE_HOST_NAME
        real_result = self.driver._connect(VOLUME, ISCSI_CONNECTOR)
        mock_host.assert_called_with(self.driver, self.array, ISCSI_CONNECTOR)
        mock_generate.assert_called_with(HOSTNAME)
        self.array.create_host.assert_called_with(PURE_HOST_NAME,
                                                  iqnlist=[INITIATOR_IQN])
        self.assertEqual(result, real_result)
        mock_generate.reset_mock()
        self.array.reset_mock()
        self.assert_error_propagates(
            [mock_host, mock_generate, self.array.connect_host,
             self.array.create_host], self.driver._connect, VOLUME,
            ISCSI_CONNECTOR)
        self.mock_config.use_chap_auth = True
        chap_user = ISCSI_CONNECTOR["host"]
        chap_password = "sOmEseCr3t"
        # Branch where chap is used and credentials already exist
        initiator_data = [{"key": pure.CHAP_SECRET_KEY,
                           "value": chap_password}]
        self.mock_utils.get_driver_initiator_data.return_value = initiator_data
        self.driver._connect(VOLUME, ISCSI_CONNECTOR)
        result["auth_username"] = chap_user
        result["auth_password"] = chap_password
        # NOTE(review): real_result is not re-captured in the CHAP branches;
        # the assertion relies on connect_host returning the same dict object
        # that _connect mutates in place — confirm this coupling is intended.
        self.assertDictEqual(result, real_result)
        self.array.set_host.assert_called_with(PURE_HOST_NAME,
                                               host_user=chap_user,
                                               host_password=chap_password)
        # Branch where chap is used and credentials are generated
        mock_gen_secret.return_value = chap_password
        self.mock_utils.get_driver_initiator_data.return_value = None
        self.driver._connect(VOLUME, ISCSI_CONNECTOR)
        result["auth_username"] = chap_user
        result["auth_password"] = chap_password
        self.assertDictEqual(result, real_result)
        self.array.set_host.assert_called_with(PURE_HOST_NAME,
                                               host_user=chap_user,
                                               host_password=chap_password)
        self.mock_utils.insert_driver_initiator_data.assert_called_with(
            ISCSI_CONNECTOR['initiator'],
            pure.CHAP_SECRET_KEY,
            chap_password
        )
    @mock.patch(ISCSI_DRIVER_OBJ + "._get_host", autospec=True)
    def test_connect_already_connected(self, mock_host):
        """'Connection already exists' errors resolve by looking up the
        existing private connection."""
        mock_host.return_value = PURE_HOST
        expected = {"host": PURE_HOST_NAME, "lun": 1}
        self.array.list_volume_private_connections.return_value = \
            [expected, {"host": "extra", "lun": 2}]
        self.array.connect_host.side_effect = \
            self.purestorage_module.PureHTTPError(
                code=http_client.BAD_REQUEST,
                text="Connection already exists"
            )
        actual = self.driver._connect(VOLUME, ISCSI_CONNECTOR)
        self.assertEqual(expected, actual)
        self.assertTrue(self.array.connect_host.called)
        self.assertTrue(self.array.list_volume_private_connections)
    @mock.patch(ISCSI_DRIVER_OBJ + "._get_host", autospec=True)
    def test_connect_already_connected_list_hosts_empty(self, mock_host):
        """'Already exists' error but no connection found on lookup ->
        PureDriverException."""
        mock_host.return_value = PURE_HOST
        self.array.list_volume_private_connections.return_value = {}
        self.array.connect_host.side_effect = \
            self.purestorage_module.PureHTTPError(
                code=http_client.BAD_REQUEST,
                text="Connection already exists"
            )
        self.assertRaises(exception.PureDriverException, self.driver._connect,
                          VOLUME, ISCSI_CONNECTOR)
        self.assertTrue(self.array.connect_host.called)
        self.assertTrue(self.array.list_volume_private_connections)
    @mock.patch(ISCSI_DRIVER_OBJ + "._get_host", autospec=True)
    def test_connect_already_connected_list_hosts_exception(self, mock_host):
        """An HTTP error during the connection lookup is re-raised."""
        mock_host.return_value = PURE_HOST
        self.array.list_volume_private_connections.side_effect = \
            self.purestorage_module.PureHTTPError(code=http_client.BAD_REQUEST,
                                                  text="")
        self.array.connect_host.side_effect = \
            self.purestorage_module.PureHTTPError(
                code=http_client.BAD_REQUEST,
                text="Connection already exists"
            )
        self.assertRaises(self.purestorage_module.PureHTTPError,
                          self.driver._connect, VOLUME,
                          ISCSI_CONNECTOR)
        self.assertTrue(self.array.connect_host.called)
        self.assertTrue(self.array.list_volume_private_connections)
    @mock.patch(ISCSI_DRIVER_OBJ + "._get_chap_secret_from_init_data")
    @mock.patch(ISCSI_DRIVER_OBJ + "._get_host", autospec=True)
    def test_connect_host_deleted(self, mock_host, mock_get_secret):
        """Host deleted mid-connect raises PureRetryableException so the
        retry decorator can re-run the operation."""
        mock_host.return_value = None
        self.mock_config.use_chap_auth = True
        mock_get_secret.return_value = 'abcdef'
        self.array.set_host.side_effect = (
            self.purestorage_module.PureHTTPError(
                code=http_client.BAD_REQUEST, text='Host does not exist.'))
        # Because we mocked out retry make sure we are raising the right
        # exception to allow for retries to happen.
        self.assertRaises(exception.PureRetryableException,
                          self.driver._connect,
                          VOLUME, ISCSI_CONNECTOR)
    @mock.patch(ISCSI_DRIVER_OBJ + "._get_host", autospec=True)
    def test_connect_iqn_already_in_use(self, mock_host):
        """IQN already in use on host create -> PureRetryableException."""
        mock_host.return_value = None
        self.array.create_host.side_effect = (
            self.purestorage_module.PureHTTPError(
                code=http_client.BAD_REQUEST,
                text='The specified IQN is already in use.'))
        # Because we mocked out retry make sure we are raising the right
        # exception to allow for retries to happen.
        self.assertRaises(exception.PureRetryableException,
                          self.driver._connect,
                          VOLUME, ISCSI_CONNECTOR)
    @mock.patch(ISCSI_DRIVER_OBJ + "._get_host", autospec=True)
    def test_connect_create_host_already_exists(self, mock_host):
        """Host name collision on create -> PureRetryableException."""
        mock_host.return_value = None
        self.array.create_host.side_effect = (
            self.purestorage_module.PureHTTPError(
                code=http_client.BAD_REQUEST, text='Host already exists.'))
        # Because we mocked out retry make sure we are raising the right
        # exception to allow for retries to happen.
        self.assertRaises(exception.PureRetryableException,
                          self.driver._connect,
                          VOLUME, ISCSI_CONNECTOR)
    @mock.patch(ISCSI_DRIVER_OBJ + "._generate_chap_secret")
    def test_get_chap_credentials_create_new(self, mock_generate_secret):
        """No stored CHAP secret: a new one is generated and persisted."""
        self.mock_utils.get_driver_initiator_data.return_value = []
        host = 'host1'
        expected_password = 'foo123'
        mock_generate_secret.return_value = expected_password
        self.mock_utils.insert_driver_initiator_data.return_value = True
        username, password = self.driver._get_chap_credentials(host,
                                                               INITIATOR_IQN)
        self.assertEqual(host, username)
        self.assertEqual(expected_password, password)
        self.mock_utils.insert_driver_initiator_data.assert_called_once_with(
            INITIATOR_IQN, pure.CHAP_SECRET_KEY, expected_password
        )
    @mock.patch(ISCSI_DRIVER_OBJ + "._generate_chap_secret")
    def test_get_chap_credentials_create_new_fail_to_set(self,
                                                         mock_generate_secret):
        """Insert race lost: fall back to the secret another node stored
        (second get_driver_initiator_data call wins)."""
        host = 'host1'
        expected_password = 'foo123'
        mock_generate_secret.return_value = 'badpassw0rd'
        self.mock_utils.insert_driver_initiator_data.return_value = False
        self.mock_utils.get_driver_initiator_data.side_effect = [
            [],
            [{'key': pure.CHAP_SECRET_KEY, 'value': expected_password}],
            exception.PureDriverException(reason='this should never be hit'),
        ]
        username, password = self.driver._get_chap_credentials(host,
                                                               INITIATOR_IQN)
        self.assertEqual(host, username)
        self.assertEqual(expected_password, password)
class PureFCDriverTestCase(PureDriverTestCase):
    """Tests for the Fibre Channel flavor of the Pure Storage driver:
    host lookup by WWN, initialize_connection with the FC lookup service,
    and the _connect host-creation/retry paths."""
    def setUp(self):
        super(PureFCDriverTestCase, self).setUp()
        self.driver = pure.PureFCDriver(configuration=self.mock_config)
        self.driver._array = self.array
        self.driver._lookup_service = mock.Mock()
    def test_get_host(self):
        """_get_host matches a purity host by the connector's WWN."""
        good_host = PURE_HOST.copy()
        good_host.update(wwn=["another-wrong-wwn", INITIATOR_WWN])
        bad_host = {"name": "bad-host", "wwn": ["wrong-wwn"]}
        self.array.list_hosts.return_value = [bad_host]
        actual_result = self.driver._get_host(self.array, FC_CONNECTOR)
        self.assertIsNone(actual_result)
        self.array.list_hosts.return_value.append(good_host)
        actual_result = self.driver._get_host(self.array, FC_CONNECTOR)
        self.assertEqual(good_host, actual_result)
        self.assert_error_propagates([self.array.list_hosts],
                                     self.driver._get_host,
                                     self.array,
                                     FC_CONNECTOR)
    def test_get_host_uppercase_wwpn(self):
        """WWPN matching is case-insensitive for connector values."""
        expected_host = PURE_HOST.copy()
        expected_host['wwn'] = [INITIATOR_WWN]
        self.array.list_hosts.return_value = [expected_host]
        connector = FC_CONNECTOR.copy()
        connector['wwpns'] = [wwpn.upper() for wwpn in FC_CONNECTOR['wwpns']]
        actual_result = self.driver._get_host(self.array, connector)
        self.assertEqual(expected_host, actual_result)
    @mock.patch(FC_DRIVER_OBJ + "._connect")
    def test_initialize_connection(self, mock_connection):
        """initialize_connection combines the fabric device mapping with
        the _connect result into the expected FC connection info."""
        lookup_service = self.driver._lookup_service
        (lookup_service.get_device_mapping_from_network.
         return_value) = DEVICE_MAPPING
        mock_connection.return_value = {"vol": VOLUME["name"] + "-cinder",
                                        "lun": 1,
                                        }
        self.array.list_ports.return_value = FC_PORTS
        actual_result = self.driver.initialize_connection(VOLUME, FC_CONNECTOR)
        self.assertDictEqual(FC_CONNECTION_INFO, actual_result)
    @mock.patch(FC_DRIVER_OBJ + "._get_host", autospec=True)
    @mock.patch(FC_DRIVER_OBJ + "._generate_purity_host_name", spec=True)
    def test_connect(self, mock_generate, mock_host):
        """_connect: existing host, new host creation with the WWN list,
        and error propagation."""
        vol_name = VOLUME["name"] + "-cinder"
        result = {"vol": vol_name, "lun": 1}
        # Branch where host already exists
        mock_host.return_value = PURE_HOST
        self.array.connect_host.return_value = {"vol": vol_name, "lun": 1}
        real_result = self.driver._connect(VOLUME, FC_CONNECTOR)
        self.assertEqual(result, real_result)
        mock_host.assert_called_with(self.driver, self.array, FC_CONNECTOR)
        self.assertFalse(mock_generate.called)
        self.assertFalse(self.array.create_host.called)
        self.array.connect_host.assert_called_with(PURE_HOST_NAME, vol_name)
        # Branch where new host is created
        mock_host.return_value = None
        mock_generate.return_value = PURE_HOST_NAME
        real_result = self.driver._connect(VOLUME, FC_CONNECTOR)
        mock_host.assert_called_with(self.driver, self.array, FC_CONNECTOR)
        mock_generate.assert_called_with(HOSTNAME)
        self.array.create_host.assert_called_with(PURE_HOST_NAME,
                                                  wwnlist={INITIATOR_WWN})
        self.assertEqual(result, real_result)
        mock_generate.reset_mock()
        self.array.reset_mock()
        self.assert_error_propagates(
            [mock_host, mock_generate, self.array.connect_host,
             self.array.create_host],
            self.driver._connect, VOLUME, FC_CONNECTOR)
    @mock.patch(FC_DRIVER_OBJ + "._get_host", autospec=True)
    def test_connect_already_connected(self, mock_host):
        """'Connection already exists' errors resolve by looking up the
        existing private connection."""
        mock_host.return_value = PURE_HOST
        expected = {"host": PURE_HOST_NAME, "lun": 1}
        self.array.list_volume_private_connections.return_value = \
            [expected, {"host": "extra", "lun": 2}]
        self.array.connect_host.side_effect = \
            self.purestorage_module.PureHTTPError(
                code=http_client.BAD_REQUEST,
                text="Connection already exists"
            )
        actual = self.driver._connect(VOLUME, FC_CONNECTOR)
        self.assertEqual(expected, actual)
        self.assertTrue(self.array.connect_host.called)
        self.assertTrue(self.array.list_volume_private_connections)
    @mock.patch(FC_DRIVER_OBJ + "._get_host", autospec=True)
    def test_connect_already_connected_list_hosts_empty(self, mock_host):
        """'Already exists' error but no connection found on lookup ->
        PureDriverException."""
        mock_host.return_value = PURE_HOST
        self.array.list_volume_private_connections.return_value = {}
        self.array.connect_host.side_effect = \
            self.purestorage_module.PureHTTPError(
                code=http_client.BAD_REQUEST,
                text="Connection already exists"
            )
        self.assertRaises(exception.PureDriverException, self.driver._connect,
                          VOLUME, FC_CONNECTOR)
        self.assertTrue(self.array.connect_host.called)
        self.assertTrue(self.array.list_volume_private_connections)
    @mock.patch(FC_DRIVER_OBJ + "._get_host", autospec=True)
    def test_connect_already_connected_list_hosts_exception(self, mock_host):
        """An HTTP error during the connection lookup is re-raised."""
        mock_host.return_value = PURE_HOST
        self.array.list_volume_private_connections.side_effect = \
            self.purestorage_module.PureHTTPError(code=http_client.BAD_REQUEST,
                                                  text="")
        self.array.connect_host.side_effect = \
            self.purestorage_module.PureHTTPError(
                code=http_client.BAD_REQUEST,
                text="Connection already exists"
            )
        self.assertRaises(self.purestorage_module.PureHTTPError,
                          self.driver._connect, VOLUME, FC_CONNECTOR)
        self.assertTrue(self.array.connect_host.called)
        self.assertTrue(self.array.list_volume_private_connections)
    @mock.patch(FC_DRIVER_OBJ + "._get_host", autospec=True)
    def test_connect_wwn_already_in_use(self, mock_host):
        """WWN already in use on host create -> PureRetryableException."""
        mock_host.return_value = None
        self.array.create_host.side_effect = (
            self.purestorage_module.PureHTTPError(
                code=http_client.BAD_REQUEST,
                text='The specified WWN is already in use.'))
        # Because we mocked out retry make sure we are raising the right
        # exception to allow for retries to happen.
        self.assertRaises(exception.PureRetryableException,
                          self.driver._connect,
                          VOLUME, FC_CONNECTOR)
@ddt.ddt
class PureVolumeUpdateStatsTestCase(PureBaseSharedDriverTestCase):
    """Tests for stats reporting: thin-provisioning ratio selection and
    the full get_volume_stats payload (including refresh caching)."""
    def setUp(self):
        super(PureVolumeUpdateStatsTestCase, self).setUp()
        self.array.get.side_effect = self.fake_get_array
    @ddt.data(dict(used=10,
                   provisioned=100,
                   config_ratio=5,
                   expected_ratio=5,
                   auto=False),
              dict(used=10,
                   provisioned=100,
                   config_ratio=5,
                   expected_ratio=10,
                   auto=True),
              dict(used=0,
                   provisioned=100,
                   config_ratio=5,
                   expected_ratio=5,
                   auto=True),
              dict(used=10,
                   provisioned=0,
                   config_ratio=5,
                   expected_ratio=5,
                   auto=True))
    @ddt.unpack
    def test_get_thin_provisioning(self,
                                   used,
                                   provisioned,
                                   config_ratio,
                                   expected_ratio,
                                   auto):
        """Auto mode derives the ratio from provisioned/used; otherwise
        (or when either value is 0) the configured ratio is used."""
        self.mock_config.pure_automatic_max_oversubscription_ratio = auto
        self.mock_config.max_over_subscription_ratio = config_ratio
        actual_ratio = self.driver._get_thin_provisioning(provisioned, used)
        self.assertEqual(expected_ratio, actual_ratio)
    @mock.patch(BASE_DRIVER_OBJ + '.get_goodness_function')
    @mock.patch(BASE_DRIVER_OBJ + '.get_filter_function')
    @mock.patch(BASE_DRIVER_OBJ + '._get_provisioned_space')
    @mock.patch(BASE_DRIVER_OBJ + '._get_thin_provisioning')
    def test_get_volume_stats(self, mock_get_thin_provisioning, mock_get_space,
                              mock_get_filter, mock_get_goodness):
        """The reported stats dict must match exactly, and refresh=False
        must serve cached values without touching the array again."""
        filter_function = 'capabilities.total_volumes < 10'
        goodness_function = '90'
        num_hosts = 20
        num_snaps = 175
        num_pgroups = 15
        reserved_percentage = 12
        self.array.list_hosts.return_value = [PURE_HOST] * num_hosts
        self.array.list_volumes.return_value = [PURE_SNAPSHOT] * num_snaps
        self.array.list_pgroups.return_value = [PURE_PGROUP] * num_pgroups
        self.mock_config.reserved_percentage = reserved_percentage
        mock_get_space.return_value = (PROVISIONED_CAPACITY * units.Gi, 100)
        mock_get_filter.return_value = filter_function
        mock_get_goodness.return_value = goodness_function
        mock_get_thin_provisioning.return_value = (PROVISIONED_CAPACITY /
                                                   USED_SPACE)
        expected_result = {
            'volume_backend_name': VOLUME_BACKEND_NAME,
            'vendor_name': 'Pure Storage',
            'driver_version': self.driver.VERSION,
            'storage_protocol': None,
            'consistencygroup_support': True,
            'thin_provisioning_support': True,
            'multiattach': False,
            'QoS_support': False,
            'total_capacity_gb': TOTAL_CAPACITY,
            'free_capacity_gb': TOTAL_CAPACITY - USED_SPACE,
            'reserved_percentage': reserved_percentage,
            'provisioned_capacity': PROVISIONED_CAPACITY,
            'max_over_subscription_ratio': (PROVISIONED_CAPACITY /
                                            USED_SPACE),
            'filter_function': filter_function,
            'goodness_function': goodness_function,
            'total_volumes': 100,
            'total_snapshots': num_snaps,
            'total_hosts': num_hosts,
            'total_pgroups': num_pgroups,
            'writes_per_sec': PERF_INFO['writes_per_sec'],
            'reads_per_sec': PERF_INFO['reads_per_sec'],
            'input_per_sec': PERF_INFO['input_per_sec'],
            'output_per_sec': PERF_INFO['output_per_sec'],
            'usec_per_read_op': PERF_INFO['usec_per_read_op'],
            'usec_per_write_op': PERF_INFO['usec_per_write_op'],
            'queue_depth': PERF_INFO['queue_depth'],
            'replication_enabled': False,
            'replication_type': ['async'],
            'replication_count': 0,
            'replication_targets': [],
        }
        real_result = self.driver.get_volume_stats(refresh=True)
        self.assertDictEqual(expected_result, real_result)
        # Make sure when refresh=False we are using cached values and not
        # sending additional requests to the array.
        self.array.reset_mock()
        real_result = self.driver.get_volume_stats(refresh=False)
        self.assertDictEqual(expected_result, real_result)
        self.assertFalse(self.array.get.called)
        self.assertFalse(self.array.list_volumes.called)
        self.assertFalse(self.array.list_hosts.called)
        self.assertFalse(self.array.list_pgroups.called)
class PureVolumeGroupsTestCase(PureBaseSharedDriverTestCase):
    """Tests for generic volume-group entry points: non-CG groups must
    raise NotImplementedError, while CG-typed groups delegate to the
    consistency-group implementations.

    Fix: test_delete_group_snapshot previously invoked
    driver.create_group_snapshot (copy/paste from the test above), so the
    delete path was never exercised; it now calls delete_group_snapshot.
    """
    def setUp(self):
        super(PureVolumeGroupsTestCase, self).setUp()
        self.array.get.side_effect = self.fake_get_array
        self.mock_context = mock.Mock()
        self.driver.db = mock.Mock()
        self.driver.db.group_get = mock.Mock()
    @mock.patch('cinder.db.group_get')
    @mock.patch(BASE_DRIVER_OBJ + '._add_volume_to_consistency_group')
    @mock.patch('cinder.volume.utils.is_group_a_cg_snapshot_type')
    def test_add_to_group_if_needed(self, mock_is_cg, mock_add_to_cg,
                                    mock_db_group_get):
        """Non-CG group: the volume is not added to any pgroup."""
        mock_is_cg.return_value = False
        vol_name = 'foo'
        group_id = fake.GROUP_ID
        volume = fake_volume.fake_volume_obj(None, group_id=group_id)
        group = mock.MagicMock()
        mock_db_group_get.return_value = group
        self.driver._add_to_group_if_needed(volume, vol_name)
        mock_is_cg.assert_called_once_with(group)
        mock_add_to_cg.assert_not_called()
    @mock.patch('cinder.db.group_get')
    @mock.patch(BASE_DRIVER_OBJ + '._add_volume_to_consistency_group')
    @mock.patch('cinder.volume.utils.is_group_a_cg_snapshot_type')
    def test_add_to_group_if_needed_with_cg(self, mock_is_cg, mock_add_to_cg,
                                            mock_db_group_get):
        """CG-typed group: the volume is added to the consistency group."""
        mock_is_cg.return_value = True
        vol_name = 'foo'
        group_id = fake.GROUP_ID
        volume = fake_volume.fake_volume_obj(None, group_id=group_id)
        group = mock.MagicMock()
        mock_db_group_get.return_value = group
        self.driver._add_to_group_if_needed(volume, vol_name)
        mock_is_cg.assert_called_once_with(group)
        mock_add_to_cg.assert_called_once_with(
            group_id,
            vol_name
        )
    @mock.patch('cinder.volume.utils.is_group_a_cg_snapshot_type')
    def test_create_group(self, mock_is_cg):
        """Non-CG group creation is unsupported."""
        mock_is_cg.return_value = False
        # NOTE(review): fake_group_type_obj is used where a group object is
        # expected; is_group_a_cg_snapshot_type is mocked so it still works,
        # but confirm fake_group.fake_group_obj was not intended here.
        group = fake_group.fake_group_type_obj(None)
        self.assertRaises(
            NotImplementedError,
            self.driver.create_group,
            self.mock_context, group
        )
        mock_is_cg.assert_called_once_with(group)
    @mock.patch('cinder.volume.utils.is_group_a_cg_snapshot_type')
    def test_delete_group(self, mock_is_cg):
        """Non-CG group deletion is unsupported."""
        mock_is_cg.return_value = False
        group = mock.MagicMock()
        volumes = [fake_volume.fake_volume_obj(None)]
        self.assertRaises(
            NotImplementedError,
            self.driver.delete_group,
            self.mock_context, group, volumes
        )
        mock_is_cg.assert_called_once_with(group)
    @mock.patch('cinder.volume.utils.is_group_a_cg_snapshot_type')
    def test_update_group(self, mock_is_cg):
        """Non-CG group update is unsupported."""
        mock_is_cg.return_value = False
        group = mock.MagicMock()
        self.assertRaises(
            NotImplementedError,
            self.driver.update_group,
            self.mock_context, group
        )
        mock_is_cg.assert_called_once_with(group)
    @mock.patch('cinder.volume.utils.is_group_a_cg_snapshot_type')
    def test_create_group_from_src(self, mock_is_cg):
        """Non-CG group cloning is unsupported."""
        mock_is_cg.return_value = False
        group = mock.MagicMock()
        volumes = [fake_volume.fake_volume_obj(None)]
        self.assertRaises(
            NotImplementedError,
            self.driver.create_group_from_src,
            self.mock_context, group, volumes
        )
        mock_is_cg.assert_called_once_with(group)
    @mock.patch('cinder.volume.utils.is_group_a_cg_snapshot_type')
    def test_create_group_snapshot(self, mock_is_cg):
        """Non-CG group snapshot creation is unsupported."""
        mock_is_cg.return_value = False
        group_snapshot = mock.MagicMock()
        snapshots = [fake_snapshot.fake_snapshot_obj(None)]
        self.assertRaises(
            NotImplementedError,
            self.driver.create_group_snapshot,
            self.mock_context, group_snapshot, snapshots
        )
        mock_is_cg.assert_called_once_with(group_snapshot)
    @mock.patch('cinder.volume.utils.is_group_a_cg_snapshot_type')
    def test_delete_group_snapshot(self, mock_is_cg):
        """Non-CG group snapshot deletion is unsupported."""
        mock_is_cg.return_value = False
        group_snapshot = mock.MagicMock()
        snapshots = [fake_snapshot.fake_snapshot_obj(None)]
        # Fixed: exercise delete_group_snapshot (was create_group_snapshot,
        # duplicating the test above and leaving the delete path untested).
        self.assertRaises(
            NotImplementedError,
            self.driver.delete_group_snapshot,
            self.mock_context, group_snapshot, snapshots
        )
        mock_is_cg.assert_called_once_with(group_snapshot)
    @mock.patch(BASE_DRIVER_OBJ + '.create_consistencygroup')
    @mock.patch('cinder.volume.group_types.get_group_type_specs')
    def test_create_group_with_cg(self, mock_get_specs, mock_create_cg):
        """CG-typed group creation delegates to create_consistencygroup."""
        mock_get_specs.return_value = '<is> True'
        group = mock.MagicMock()
        self.driver.create_group(self.mock_context, group)
        mock_create_cg.assert_called_once_with(self.mock_context, group)
    @mock.patch(BASE_DRIVER_OBJ + '.delete_consistencygroup')
    @mock.patch('cinder.volume.group_types.get_group_type_specs')
    def test_delete_group_with_cg(self, mock_get_specs, mock_delete_cg):
        """CG-typed group deletion delegates to delete_consistencygroup."""
        mock_get_specs.return_value = '<is> True'
        group = mock.MagicMock()
        volumes = [fake_volume.fake_volume_obj(None)]
        self.driver.delete_group(self.mock_context, group, volumes)
        mock_delete_cg.assert_called_once_with(self.mock_context,
                                               group,
                                               volumes)
    @mock.patch(BASE_DRIVER_OBJ + '.update_consistencygroup')
    @mock.patch('cinder.volume.group_types.get_group_type_specs')
    def test_update_group_with_cg(self, mock_get_specs, mock_update_cg):
        """CG-typed group update delegates to update_consistencygroup."""
        mock_get_specs.return_value = '<is> True'
        group = mock.MagicMock()
        addvollist = [mock.Mock()]
        remvollist = [mock.Mock()]
        self.driver.update_group(
            self.mock_context,
            group,
            addvollist,
            remvollist
        )
        mock_update_cg.assert_called_once_with(
            self.mock_context,
            group,
            addvollist,
            remvollist
        )
    @mock.patch(BASE_DRIVER_OBJ + '.create_consistencygroup_from_src')
    @mock.patch('cinder.volume.group_types.get_group_type_specs')
    def test_create_group_from_src_with_cg(self, mock_get_specs, mock_create):
        """CG-typed group cloning delegates to
        create_consistencygroup_from_src with all arguments forwarded."""
        mock_get_specs.return_value = '<is> True'
        group = mock.MagicMock()
        volumes = [mock.Mock()]
        group_snapshot = mock.Mock()
        snapshots = [mock.Mock()]
        source_group = mock.MagicMock()
        source_vols = [mock.Mock()]
        self.driver.create_group_from_src(
            self.mock_context,
            group,
            volumes,
            group_snapshot,
            snapshots,
            source_group,
            source_vols
        )
        mock_create.assert_called_once_with(
            self.mock_context,
            group,
            volumes,
            group_snapshot,
            snapshots,
            source_group,
            source_vols
        )
    @mock.patch(BASE_DRIVER_OBJ + '.create_cgsnapshot')
    @mock.patch('cinder.volume.group_types.get_group_type_specs')
    def test_create_group_snapshot_with_cg(self, mock_get_specs,
                                           mock_create_cgsnap):
        """CG-typed group snapshot creation delegates to
        create_cgsnapshot."""
        mock_get_specs.return_value = '<is> True'
        group_snapshot = mock.MagicMock()
        snapshots = [mock.Mock()]
        self.driver.create_group_snapshot(
            self.mock_context,
            group_snapshot,
            snapshots
        )
        mock_create_cgsnap.assert_called_once_with(
            self.mock_context,
            group_snapshot,
            snapshots
        )
    @mock.patch(BASE_DRIVER_OBJ + '.delete_cgsnapshot')
    @mock.patch('cinder.volume.group_types.get_group_type_specs')
    def test_delete_group_snapshot_with_cg(self, mock_get_specs,
                                           mock_delete_cg):
        """CG-typed group snapshot deletion delegates to
        delete_cgsnapshot."""
        mock_get_specs.return_value = '<is> True'
        group_snapshot = mock.MagicMock()
        snapshots = [mock.Mock()]
        self.driver.delete_group_snapshot(
            self.mock_context,
            group_snapshot,
            snapshots
        )
        mock_delete_cg.assert_called_once_with(
            self.mock_context,
            group_snapshot,
            snapshots
        )
| __init__ |
__init__.py | __version__ = "0.1.0a23"
__all__ = ["install"] | """A configurable Python package backed by Pyodide's micropip"""
from .piplite import install
|
|
v5.go | package uuid
import "crypto/sha1"
// GenerateV5 generates a version 5 UUID based on a namespace UUID and additional data.
func | (ns UUID, data []byte) (UUID, error) {
guid, err := hashUUID(sha1.New(), ns, data)
if err != nil {
return Null, err
}
return guid.withVersion(Version5), nil
}
// V5 returns a version 5 UUID or panics otherwise.
// It is a convenience wrapper around GenerateV5 for callers that treat a
// generation failure as a programming error rather than a recoverable one.
func V5(ns UUID, data []byte) UUID {
	return uuidOrPanic(GenerateV5(ns, data))
}
| GenerateV5 |
com.py | # Copyright (C) 2019 Intel Corporation. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
import scenario_cfg_lib
import launch_cfg_lib
import common
import pt
def is_nuc_whl_linux(names, vmid):
    """Return True for a Linux-like UOS on any board other than apl-mrb/apl-up2.

    :param names: dictionary with 'uos_types' (per-VM) and 'board_name'
    :param vmid: ID of the vm
    """
    uos = names['uos_types'][vmid]
    board = names['board_name']
    return launch_cfg_lib.is_linux_like(uos) and board not in ("apl-mrb", "apl-up2")
def is_mount_needed(virt_io, vmid):
    """Return True when any virtio-blk entry of VM *vmid* requires a host mount.

    virt_io is accepted for signature compatibility; the decision is driven by
    the per-VM mount flag list maintained in launch_cfg_lib.
    """
    return True in launch_cfg_lib.MOUNT_FLAG_DIC[vmid]
def tap_uos_net(names, virt_io, vmid, config):
    """Emit the pre-launch shell checks for VM *vmid*: rootfs image presence,
    mac_seed derivation, tap device creation, and an already-running guard.

    :param names: dictionary with 'uos_types' (per-VM) and 'board_name'
    :param virt_io: per-VM virtio device settings
    :param vmid: ID of the vm
    :param config: file pointer of the launch script being generated
    """
    uos_type = names['uos_types'][vmid]
    board_name = names['board_name']
    vm_name = common.undline_name(uos_type).lower()
    if launch_cfg_lib.is_linux_like(uos_type) or uos_type in ("ANDROID", "ALIOS"):
        i = 0
        # for each mounted virtio-blk entry, verify its image exists under /data<i>
        for mount_flag in launch_cfg_lib.MOUNT_FLAG_DIC[vmid]:
            if not mount_flag:
                i += 1
                continue
            # blk is 'partition:image'; keep only the image name
            blk = virt_io['block'][vmid][i]
            rootfs_img = blk.split(':')[1].strip(':')
            print('if [ ! -f "/data{}/{}" ]; then'.format(i, rootfs_img), file=config)
            print('  echo "no /data{}/{}, exit"'.format(i, rootfs_img), file=config)
            print("  exit", file=config)
            print("fi", file=config)
            print("", file=config)
            i += 1
        # mac_seed makes the generated guest MAC address unique per host+VM
        print("#vm-name used to generate uos-mac address", file=config)
        print("mac=$(cat /sys/class/net/e*/address)", file=config)
        print("vm_name=post_vm_id$1", file=config)
        print("mac_seed=${mac:0:17}-${vm_name}", file=config)
        print("", file=config)
    # create one tap device per configured virtio-net entry
    for net in virt_io['network'][vmid]:
        if net:
            net_name = net
            if ',' in net:
                net_name = net.split(',')[0]
            print("tap_net tap_{}".format(net_name), file=config)
    # refuse to launch the same VM twice
    print("#check if the vm is running or not", file=config)
    print("vm_ps=$(pgrep -a -f acrn-dm)", file=config)
    print('result=$(echo $vm_ps | grep -w "${vm_name}")', file=config)
    print('if [[ "$result" != "" ]]; then', file=config)
    print(' echo "$vm_name is running, can\'t create twice!"', file=config)
    print(" exit", file=config)
    print("fi", file=config)
    print("", file=config)
def off_line_cpus(args, vmid, uos_type, config):
    """
    Emit shell code that offlines the VM's pinned pCPUs in the SOS and hands
    them to the hypervisor (via acrn_hsm remove_cpu) before launching the UOS.

    :param args: the dictionary of argument for acrn-dm
    :param vmid: ID of the vm
    :param uos_type: the type of UOS (unused here; kept for call compatibility)
    :param config: it is a file pointer to write offline cpu information
    """
    pcpu_id_list = get_cpu_affinity_list(args["cpu_affinity"], vmid)
    if not pcpu_id_list:
        # fall back to the scenario file's cpu_affinity, keyed by absolute VM id
        sos_vmid = launch_cfg_lib.get_sos_vmid()
        cpu_affinity = common.get_leaf_tag_map(common.SCENARIO_INFO_FILE, "cpu_affinity", "pcpu_id")
        pcpu_id_list = get_cpu_affinity_list(cpu_affinity, sos_vmid+vmid)
    if not pcpu_id_list:
        # record the error but still emit the (empty-list) shell loop below
        key = "scenario config error"
        launch_cfg_lib.ERR_LIST[key] = "No available cpu to offline and pass it to vm {}".format(vmid)
    print("# offline pinned vCPUs from SOS before launch UOS", file=config)
    print('cpu_path="/sys/devices/system/cpu"', file=config)
    print("for i in `ls ${cpu_path}`; do", file=config)
    print("    for j in {}; do".format(' '.join([str(i) for i in pcpu_id_list])), file=config)
    print('        if [ "cpu"$j = $i ]; then', file=config)
    print('            online=`cat ${cpu_path}/$i/online`', file=config)
    print('            idx=`echo $i | tr -cd "[1-99]"`', file=config)
    print('            echo $i online=$online', file=config)
    print('            if [ "$online" = "1" ]; then', file=config)
    print("                echo 0 > ${cpu_path}/$i/online", file=config)
    print("                online=`cat ${cpu_path}/$i/online`", file=config)
    print("                # during boot time, cpu hotplug may be disabled by pci_device_probe during a pci module insmod", file=config)
    print('                while [ "$online" = "1" ]; do', file=config)
    print("                    sleep 1", file=config)
    print("                    echo 0 > ${cpu_path}/$i/online", file=config)
    print("                    online=`cat ${cpu_path}/$i/online`", file=config)
    print("                done", file=config)
    print("                echo $idx > /sys/devices/virtual/misc/acrn_hsm/remove_cpu", file=config)
    print("            fi", file=config)
    print("        fi", file=config)
    print("    done", file=config)
    print("done", file=config)
    print("", file=config)
def run_container(board_name, uos_type, config):
    """
    Emit the run_container() shell helper used to start the UOS inside runC.
    The container contains the clearlinux as rootfs.
    Only emitted for apl-mrb/nuc boards with a Linux-like UOS.
    :param board_name: board name
    :param uos_type: the os name of user os
    :param config: the file pointer to store the information
    """
    # the runC.json is store in the path under board name, but for nuc7i7dnb/nuc6cayh/kbl-nuc-i7 is under nuc/
    if 'nuc' in board_name:
        board_name = 'nuc'
    if board_name not in ("apl-mrb", "nuc") or not launch_cfg_lib.is_linux_like(uos_type):
        return
    print("function run_container()", file=config)
    print("{", file=config)
    print("vm_name=vm1", file=config)
    print('config_src="/usr/share/acrn/samples/{}/runC.json"'.format(board_name), file=config)
    print('shell="/usr/share/acrn/conf/add/$vm_name.sh"', file=config)
    print('arg_file="/usr/share/acrn/conf/add/$vm_name.args"', file=config)
    print('runc_bundle="/usr/share/acrn/conf/add/runc/$vm_name"', file=config)
    print('rootfs_dir="/usr/share/acrn/conf/add/runc/rootfs"', file=config)
    print('config_dst="$runc_bundle/config.json"', file=config)
    print("", file=config)
    print("", file=config)
    # kill/delete a previously running container of the same name
    print("input=$(runc list -f table | awk '{print $1}''{print $3}')", file=config)
    print("arr=(${input// / })", file=config)
    print("", file=config)
    print("for((i=0;i<${#arr[@]};i++))", file=config)
    print("do", file=config)
    print('	if [ "$vm_name" = "${arr[$i]}" ]; then', file=config)
    print('		if [ "running" = "${arr[$i+1]}" ]; then', file=config)
    print('			echo "runC instance ${arr[$i]} is running"', file=config)
    print("			exit", file=config)
    print("		else", file=config)
    print("			runc kill ${arr[$i]}", file=config)
    print("			runc delete ${arr[$i]}", file=config)
    print("		fi", file=config)
    print("	fi", file=config)
    print("done", file=config)
    # stop the acrnctl-managed VM of the same name if it is not already stopped
    print("vmsts=$(acrnctl list)", file=config)
    print("vms=(${vmsts// / })", file=config)
    print("for((i=0;i<${#vms[@]};i++))", file=config)
    print("do", file=config)
    print('	if [ "$vm_name" = "${vms[$i]}" ]; then', file=config)
    print('		if [ "stopped" != "${vms[$i+1]}" ]; then', file=config)
    print('			echo "Uos ${vms[$i]} ${vms[$i+1]}"', file=config)
    print("			acrnctl stop ${vms[$i]}", file=config)
    print("		fi", file=config)
    print("	fi", file=config)
    print("done", file=config)
    # splice the per-VM launch shell + args into the runC config.json template
    dst_str = """    cp  "$config_src"  "$config_dst"
    args=$(sed '{s/-C//g;s/^[ \\t]*//g;s/^/\\"/;s/ /\\",\\"/g;s/$/\\"/}' ${arg_file})
    sed -i "s|\\"sh\\"|\\"$shell\\", $args|" $config_dst"""
    print('', file=config)
    print('if [ ! -f "$shell" ]; then', file=config)
    print('	echo "Pls add the vm at first!"', file=config)
    print('	exit', file=config)
    print('fi', file=config)
    print('', file=config)
    print('if [ ! -f "$arg_file" ]; then', file=config)
    print('	echo "Pls add the vm args!"', file=config)
    print('	exit', file=config)
    print('fi', file=config)
    print('', file=config)
    print('if [ ! -d "$rootfs_dir" ]; then', file=config)
    print('	mkdir -p "$rootfs_dir"', file=config)
    print('fi', file=config)
    print('if [ ! -d "$runc_bundle" ]; then', file=config)
    print('	mkdir -p "$runc_bundle"', file=config)
    print('fi', file=config)
    print('if [ ! -f "$config_dst" ]; then', file=config)
    print('{}'.format(dst_str), file=config)
    print('fi', file=config)
    print('runc run --bundle $runc_bundle -d $vm_name', file=config)
    print('echo "The runC container is running in backgroud"', file=config)
    print('echo "\'#runc exec <vmname> bash\' to login the container bash"', file=config)
    print('exit', file=config)
    print('}', file=config)
    print('', file=config)
def boot_image_type(args, vmid, config):
    """Emit the vSBL boot-image selection shell snippet for VM *vmid*.

    Only VMs whose virtual bootloader is "vsbl" get the snippet; the debug
    flag ($4 of the launch script) selects the debug vSBL binary.

    :param args: the dictionary of argument for acrn-dm
    :param vmid: ID of the vm
    :param config: file pointer of the launch script being generated
    """
    # original condition `not X or (X and X != "vsbl")` reduces to `X != "vsbl"`
    if args['vbootloader'][vmid] != "vsbl":
        return
    print('boot_dev_flag=",b"', file=config)
    print("if [ $4 == 1 ];then", file=config)
    print('   boot_image_option="--vsbl /usr/share/acrn/bios/VSBL_debug.bin"', file=config)
    print("else", file=config)
    print('   boot_image_option="--vsbl /usr/share/acrn/bios/VSBL.bin"', file=config)
    print("fi", file=config)
    print("", file=config)
def interrupt_storm(pt_sel, config):
if not pt_sel:
return
# TODO: --intr_monitor should be configurable by user
print("#interrupt storm monitor for pass-through devices, params order:", file=config) |
def gvt_arg_set(dm, vmid, uos_type, config):
    """Emit the GPU argument for VM *vmid*.

    'gvtd' means full GPU passthrough (slot 2); any other non-empty value
    selects GVT-g mediation with the resolution string passed as "$2".
    uos_type is unused and kept for call compatibility.
    """
    gvt_args = dm['gvt_args'][vmid]
    if gvt_args == "gvtd":
        # decompose the host GPU BDF (bus:dev.fun, hex) once
        bdf = launch_cfg_lib.GPU_BDF
        bus = int(bdf.split(':')[0], 16)
        dev = int(bdf.split('.')[0].split(':')[1], 16)
        fun = int(bdf.split('.')[1], 16)
        print('   -s 2,passthru,{}/{}/{},gpu \\'.format(bus, dev, fun), file=config)
    elif gvt_args:
        print('   -s 2,pci-gvt -G "$2" \\', file=config)
def log_level_set(uos_type, config):
    """Emit the acrn-dm logger_setting shell variable.

    uos_type is unused; every VM gets the same logger configuration.
    """
    lines = (
        "#logger_setting, format: logger_name,level; like following",
        'logger_setting="--logger_setting console,level=4;kmsg,level=3;disk,level=5"',
        "",
    )
    for line in lines:
        print(line, file=config)
def tap_network(virt_io, vmid, config):
    """Emit the tap_net() shell helper when VM *vmid* has at least one
    virtio-net entry; the helper creates a per-VM tap device and enslaves it
    to the acrn-br0 bridge when that bridge exists.

    :param virt_io: per-VM virtio device settings
    :param vmid: ID of the vm
    :param config: file pointer of the launch script being generated
    """
    none_i = 0
    tap_net_list = virt_io['network'][vmid]
    # count empty slots so we only emit the helper when a real NIC is configured
    for net in tap_net_list:
        if net == None:
            none_i += 1
    tap_net_num = len(tap_net_list) - none_i
    if tap_net_num >= 1:
        print("function tap_net() {", file=config)
        print("# create a unique tap device for each VM", file=config)
        print("tap=$1", file=config)
        print('tap_exist=$(ip a | grep "$tap" | awk \'{print $1}\')', file=config)
        print('if [ "$tap_exist"x != "x" ]; then', file=config)
        print('  echo "tap device existed, reuse $tap"', file=config)
        print("else", file=config)
        print("  ip tuntap add dev $tap mode tap", file=config)
        print("fi", file=config)
        print("", file=config)
        print("# if acrn-br0 exists, add VM's unique tap device under it", file=config)
        print("br_exist=$(ip a | grep acrn-br0 | awk '{print $1}')", file=config)
        print('if [ "$br_exist"x != "x" -a "$tap_exist"x = "x" ]; then', file=config)
        print('  echo "acrn-br0 bridge aleady exists, adding new tap device to it..."', file=config)
        print('  ip link set "$tap" master acrn-br0', file=config)
        print('  ip link set dev "$tap" down', file=config)
        print('  ip link set dev "$tap" up', file=config)
        print("fi", file=config)
        print("}", file=config)
        print("", file=config)
def launch_begin(names, virt_io, vmid, config):
    """Emit the tap-net helper, the optional runC helper, and the opening of
    the launch_<os>() shell function for VM *vmid*.
    """
    uos_type = names['uos_types'][vmid]
    tap_network(virt_io, vmid, config)
    run_container(names['board_name'], uos_type, config)
    launch_uos = common.undline_name(uos_type).lower()
    print("function launch_{}()".format(launch_uos), file=config)
    print("{", file=config)
def wa_usage(uos_type, config):
    """Emit the USB role-switch workaround for Android-family guests only."""
    if uos_type not in ("ANDROID", "ALIOS"):
        return
    print("# WA for USB role switch hang issue, disable runtime PM of xHCI device", file=config)
    print("echo on > /sys/devices/pci0000:00/0000:00:15.0/power/control", file=config)
    print("", file=config)
def mem_size_set(args, vmid, config):
    """Write the UOS memory-size shell variable (in MiB) into the launch script."""
    print("mem_size={}M".format(args['mem_size'][vmid]), file=config)
def uos_launch(names, args, virt_io, vmid, config):
    """Emit the final launch_<os> invocation (plus the optional runC branch
    and post-launch umounts) at the bottom of the generated script.

    The argument list of launch_<os> depends on whether GVT args exist and
    whether mounted virtio-blk images require a $debug trailer / umount.

    :param names: dictionary with 'uos_types' (per-VM) and 'board_name'
    :param args: the dictionary of argument for acrn-dm
    :param virt_io: per-VM virtio device settings
    :param vmid: ID of the vm
    :param config: file pointer of the launch script being generated
    """
    gvt_args = args['gvt_args'][vmid]
    uos_type = names['uos_types'][vmid]
    launch_uos = common.undline_name(uos_type).lower()
    board_name = names['board_name']
    if 'nuc' in board_name:
        board_name = 'nuc'
    if uos_type == "CLEARLINUX" and board_name in ("apl-mrb", "nuc"):
        # "-C": run the UOS inside a runC container instead of directly
        print('if [ "$1" = "-C" ];then', file=config)
        print('    if [ $(hostname) = "runc" ]; then', file=config)
        print('        echo "Already in container exit!"', file=config)
        print("        exit", file=config)
        print("    fi", file=config)
        print('    echo "runc_container"', file=config)
        print("    run_container", file=config)
        if board_name == "apl-mrb":
            print("    exit", file=config)
        print("fi", file=config)
        if is_mount_needed(virt_io, vmid):
            print("", file=config)
            if gvt_args == "gvtd" or not gvt_args:
                print('launch_{} {} "{}" $debug'.format(launch_uos, vmid, vmid), file=config)
            else:
                print('launch_{} {} "{}" "{}" $debug'.format(launch_uos, vmid, gvt_args, vmid), file=config)
            print("", file=config)
            # undo the /data<i> mounts done in launch_end()
            i = 0
            for mount_flag in launch_cfg_lib.MOUNT_FLAG_DIC[vmid]:
                if not mount_flag:
                    i += 1
                    continue
                print("umount /data{}".format(i), file=config)
                i += 1
        else:
            # pairs with the container "if" emitted above
            print("else", file=config)
            if gvt_args == "gvtd" or not gvt_args:
                print('    launch_{} {}'.format(launch_uos, vmid), file=config)
            elif gvt_args:
                print('    launch_{} {} "{}"'.format(launch_uos, vmid, gvt_args), file=config)
            print("fi", file=config)
        return
    elif not is_mount_needed(virt_io, vmid):
        if gvt_args == "gvtd" or not gvt_args:
            print('launch_{} {}'.format(launch_uos, vmid), file=config)
        else:
            print('launch_{} {} "{}"'.format(launch_uos, vmid, gvt_args), file=config)
    else:
        print("", file=config)
        if gvt_args == "gvtd" or not gvt_args:
            print('launch_{} {} "{}" $debug'.format(launch_uos, vmid, vmid), file=config)
        else:
            print('launch_{} {} "{}" "{}" $debug'.format(launch_uos, vmid, gvt_args, vmid), file=config)
        print("", file=config)
        # undo the /data<i> mounts done in launch_end()
        i = 0
        for mount_flag in launch_cfg_lib.MOUNT_FLAG_DIC[vmid]:
            if not mount_flag:
                i += 1
                continue
            print("umount /data{}".format(i), file=config)
            i += 1
def launch_end(names, args, virt_io, vmid, config):
    """Emit the tail of the launch script: getopts parsing (for eligible OS
    types), data-partition checks/mounts, optional pCPU offlining, and the
    final launch_<os> invocation.

    :param names: dictionary with 'uos_types' (per-VM) and 'board_name'
    :param args: the dictionary of argument for acrn-dm
    :param virt_io: per-VM virtio device settings
    :param vmid: ID of the vm
    :param config: file pointer of the launch script being generated
    """
    # dead locals removed: board_name and mem_size were assigned but never used
    uos_type = names['uos_types'][vmid]
    if uos_type in ("CLEARLINUX", "ANDROID", "ALIOS") and not is_nuc_whl_linux(names, vmid):
        print("debug=0", file=config)
        print("", file=config)
        print('while getopts "hdC" opt', file=config)
        print("do", file=config)
        print("    case $opt in", file=config)
        print("        d) debug=1", file=config)
        print("            ;;", file=config)
        print("        C)", file=config)
        print("            ;;", file=config)
        print("        h) help", file=config)
        print("            exit 1", file=config)
        print("            ;;", file=config)
        print("        ?) help", file=config)
        print("            exit 1", file=config)
        print("            ;;", file=config)
        print("    esac", file=config)
        print("done", file=config)
        print("", file=config)
    if is_mount_needed(virt_io, vmid):
        # verify each data partition exists, then mount it under /data<i>
        i = 0
        for mount_flag in launch_cfg_lib.MOUNT_FLAG_DIC[vmid]:
            if not mount_flag:
                i += 1
                continue
            blk = virt_io['block'][vmid][i]
            root_fs = blk.split(':')[0]
            print('if [ ! -b "{}" ]; then'.format(root_fs), file=config)
            print('  echo "no {} data partition, exit"'.format(root_fs), file=config)
            print("  exit", file=config)
            print("fi", file=config)
            print("mkdir -p /data{}".format(i), file=config)
            print("mount {} /data{}".format(root_fs, i), file=config)
            print("", file=config)
            i += 1
    sos_vmid = launch_cfg_lib.get_sos_vmid()
    # without CPU sharing (or for RT VMs) the pinned pCPUs must be offlined first
    if args['cpu_sharing'] == "SCHED_NOOP" or common.VM_TYPES[vmid+sos_vmid] == "POST_RT_VM":
        off_line_cpus(args, vmid, uos_type, config)
    uos_launch(names, args, virt_io, vmid, config)
def set_dm_pt(names, sel, vmid, config, dm):
    """Emit one acrn-dm passthrough argument per PCI device selected for VM
    *vmid* (xDCI, audio, CSE, SD card, bluetooth, wifi, IPU, ethernet, SATA,
    NVMe). A device is emitted only when both its BDF and its slot are set.

    :param names: dictionary with 'uos_types' (per-VM) and 'board_name'
    :param sel: selection object with per-device 'bdf' and 'slot' maps
    :param vmid: ID of the vm
    :param config: file pointer of the launch script being generated
    :param dm: the dictionary of argument for acrn-dm (used for enable_ptm)
    """
    uos_type = names['uos_types'][vmid]
    if sel.bdf['usb_xdci'][vmid] and sel.slot['usb_xdci'][vmid]:
        sub_attr = ''
        if uos_type == "WINDOWS":
            # Windows guests need the d3hot reset quirk on the xDCI controller
            sub_attr = ',d3hot_reset'
        print('   -s {},passthru,{}/{}/{}{} \\'.format(sel.slot["usb_xdci"][vmid], sel.bdf["usb_xdci"][vmid][0:2],\
             sel.bdf["usb_xdci"][vmid][3:5], sel.bdf["usb_xdci"][vmid][6:7], sub_attr), file=config)
    # pass through audio/audio_codec
    if sel.bdf['audio'][vmid]:
        print("   $boot_audio_option \\", file=config)
    if sel.bdf['cse'][vmid] and sel.slot['cse'][vmid]:
        print("   $boot_cse_option \\", file=config)
    if sel.bdf["sd_card"][vmid] and sel.slot['sd_card'][vmid]:
        print('   -s {},passthru,{}/{}/{} \\'.format(sel.slot["sd_card"][vmid], sel.bdf["sd_card"][vmid][0:2], \
            sel.bdf["sd_card"][vmid][3:5], sel.bdf["sd_card"][vmid][6:7]), file=config)
    if sel.bdf['bluetooth'][vmid] and sel.slot['bluetooth'][vmid]:
        print('   -s {},passthru,{}/{}/{} \\'.format(sel.slot["bluetooth"][vmid], sel.bdf["bluetooth"][vmid][0:2], \
            sel.bdf["bluetooth"][vmid][3:5], sel.bdf["bluetooth"][vmid][6:7]), file=config)
    if sel.bdf['wifi'][vmid] and sel.slot['wifi'][vmid]:
        if uos_type == "ANDROID":
            # Android keeps the GSI routed to the guest for wifi
            print("   -s {},passthru,{}/{}/{},keep_gsi \\".format(sel.slot["wifi"][vmid], sel.bdf["wifi"][vmid][0:2], \
                sel.bdf["wifi"][vmid][3:5], sel.bdf["wifi"][vmid][6:7]), file=config)
        else:
            print("   -s {},passthru,{}/{}/{} \\".format(sel.slot["wifi"][vmid], sel.bdf["wifi"][vmid][0:2], \
                sel.bdf["wifi"][vmid][3:5], sel.bdf["wifi"][vmid][6:7]), file=config)
    if sel.bdf['ipu'][vmid] or sel.bdf['ipu_i2c'][vmid]:
        print("   $boot_ipu_option      \\", file=config)
    if sel.bdf['ethernet'][vmid] and sel.slot['ethernet'][vmid]:
        # enable_ptm adds PCIe Precision Time Measurement passthrough
        if vmid in dm["enable_ptm"] and dm["enable_ptm"][vmid] == 'y':
            print("   -s {},passthru,{}/{}/{},enable_ptm \\".format(sel.slot["ethernet"][vmid], sel.bdf["ethernet"][vmid][0:2], \
                sel.bdf["ethernet"][vmid][3:5], sel.bdf["ethernet"][vmid][6:7]), file=config)
        else:
            print("   -s {},passthru,{}/{}/{} \\".format(sel.slot["ethernet"][vmid], sel.bdf["ethernet"][vmid][0:2], \
                sel.bdf["ethernet"][vmid][3:5], sel.bdf["ethernet"][vmid][6:7]), file=config)
    if sel.bdf['sata'] and sel.slot["sata"][vmid]:
        print("   -s {},passthru,{}/{}/{} \\".format(sel.slot["sata"][vmid], sel.bdf["sata"][vmid][0:2], \
            sel.bdf["sata"][vmid][3:5], sel.bdf["sata"][vmid][6:7]), file=config)
    if sel.bdf['nvme'] and sel.slot["nvme"][vmid]:
        print("   -s {},passthru,{}/{}/{} \\".format(sel.slot["nvme"][vmid], sel.bdf["nvme"][vmid][0:2], \
            sel.bdf["nvme"][vmid][3:5], sel.bdf["nvme"][vmid][6:7]), file=config)
def vboot_arg_set(dm, vmid, config):
    """
    Emit the virtual-bootloader acrn-dm argument for VM *vmid*:
    OVMF firmware for "ovmf", the pre-computed $boot_image_option for "vsbl",
    nothing otherwise.

    :param dm: the dictionary of argument for acrn-dm
    :param vmid: ID of the vm
    :param config: it is a file pointer to write vboot loader information
    :return: None
    """
    # TODO: Support to generate '-k' xml config from webUI and to parse it
    bootloader = dm['vbootloader'][vmid]
    if bootloader == "ovmf":
        print("   --ovmf /usr/share/acrn/bios/OVMF.fd \\", file=config)
    elif bootloader == "vsbl":
        print("   $boot_image_option \\", file=config)
def xhci_args_set(dm, vmid, config):
    """Emit the usb-xhci mediator argument when the launch config defines one."""
    # usb_xhci value is a string; empty/None means no xHCI mediator
    xhci_val = dm['xhci'][vmid]
    if not xhci_val:
        return
    print("   -s {},xhci,{} \\".format(
        launch_cfg_lib.virtual_dev_slot("xhci"), xhci_val), file=config)
def shm_arg_set(dm, vmid, config):
    """Emit one ivshmem argument per configured shared-memory region,
    unless shared memory is globally disabled ('n')."""
    if dm['shm_enabled'] == "n":
        return
    for shm_region in dm["shm_regions"][vmid]:
        slot = launch_cfg_lib.virtual_dev_slot("shm_region_{}".format(shm_region))
        print("   -s {},ivshmem,{} \\".format(slot, shm_region), file=config)
def virtio_args_set(dm, virt_io, vmid, config):
    """Emit the virtio-input/blk/net/console device arguments for VM *vmid*.

    :param dm: the dictionary of argument for acrn-dm (unused here)
    :param virt_io: per-VM virtio device settings
    :param vmid: ID of the vm
    :param config: file pointer of the launch script being generated
    """
    # virtio-input set, the value type is a list
    for input_val in virt_io['input'][vmid]:
        if input_val:
            print("   -s {},virtio-input,{} \\".format(
                launch_cfg_lib.virtual_dev_slot("virtio-input{}".format(input_val)), input_val), file=config)
    # virtio-blk set, the value type is a list
    i = 0
    for mount_flag in launch_cfg_lib.MOUNT_FLAG_DIC[vmid]:
        blk = virt_io['block'][vmid][i]
        if not mount_flag:
            if blk:
                # unmounted entry: pass the configured image path straight through
                rootfs_img = blk.strip(':')
                print("   -s {},virtio-blk,{} \\".format(launch_cfg_lib.virtual_dev_slot("virtio-blk{}".format(blk)), rootfs_img), file=config)
            i += 1
            continue
        # mounted entry 'partition:image': the image lives under /data<i> at runtime
        rootfs_img = blk.split(':')[1].strip(':')
        print("   -s {},virtio-blk,/data{}/{} \\".format(launch_cfg_lib.virtual_dev_slot("blk_mount_{}".format(i)), i, rootfs_img), file=config)
        i += 1
    # virtio-net set, the value type is a list
    for net in virt_io['network'][vmid]:
        if net:
            print("   -s {},virtio-net,tap_{} \\".format(launch_cfg_lib.virtual_dev_slot("virtio-net{}".format(net)), net), file=config)
    # virtio-console set, the value type is a string
    if virt_io['console'][vmid]:
        print("   -s {},virtio-console,{} \\".format(
            launch_cfg_lib.virtual_dev_slot("virtio-console"),
            virt_io['console'][vmid]), file=config)
def get_cpu_affinity_list(cpu_affinity, vmid):
    """Return the non-None pCPU ids assigned to *vmid*.

    :param cpu_affinity: mapping of UOS id -> iterable of pcpu ids (entries
        may be None for unassigned slots)
    :param vmid: ID of the vm to look up
    :return: list of pcpu ids; an empty list when *vmid* has no entry
        (the original returned '' here — both are falsy, and all callers
        only truth-test or join the result, so [] is the consistent choice)
    """
    if vmid not in cpu_affinity:
        return []
    return [pcpu for pcpu in cpu_affinity[vmid] if pcpu is not None]
def pcpu_arg_set(dm, vmid, config):
    """Emit --cpu_affinity for VM *vmid* when a CPU scheduler other than
    SCHED_NOOP is active and pCPUs are assigned."""
    if dm['cpu_sharing'] == "SCHED_NOOP":
        return
    pcpus = get_cpu_affinity_list(dm["cpu_affinity"], vmid)
    if pcpus:
        print("   --cpu_affinity {} \\".format(','.join(pcpus)), file=config)
def dm_arg_set(names, sel, virt_io, dm, vmid, config):
    """Emit the complete acrn-dm command line for VM *vmid* into the launch
    script, combining OS-type flags, power-management channel, logger,
    virtio devices, GVT, bootloader, CPU affinity, ivshmem and passthrough.

    :param names: dictionary with 'uos_types' (per-VM) and 'board_name'
    :param sel: passthrough device selection (bdf/slot maps)
    :param virt_io: per-VM virtio device settings
    :param dm: the dictionary of argument for acrn-dm
    :param vmid: ID of the vm
    :param config: file pointer of the launch script being generated
    """
    uos_type = names['uos_types'][vmid]
    board_name = names['board_name']
    boot_image_type(dm, vmid, config)
    # uuid get
    sos_vmid = launch_cfg_lib.get_sos_vmid()
    scenario_uuid = launch_cfg_lib.get_scenario_uuid(vmid, sos_vmid)
    # clearlinux/android/alios
    print('acrn-dm -A -m $mem_size -s 0:0,hostbridge -U {} \\'.format(scenario_uuid), file=config)
    if launch_cfg_lib.is_linux_like(uos_type) or uos_type in ("ANDROID", "ALIOS"):
        if uos_type in ("ANDROID", "ALIOS"):
            # Android/AliOS additionally need NPK trace, RPMB and trusty
            print('   $npk_virt \\', file=config)
            print("   -s {},virtio-rpmb \\".format(launch_cfg_lib.virtual_dev_slot("virtio-rpmb")), file=config)
            print("   --enable_trusty \\", file=config)
        # mac_seed
        print("   --mac_seed $mac_seed \\", file=config)
    if dm['rtos_type'][vmid] != "no":
        if virt_io:
            print("   --virtio_poll 1000000 \\", file=config)
        if dm['rtos_type'][vmid] == "Soft RT":
            print("   --rtvm \\", file=config)
        if dm['rtos_type'][vmid] == "Hard RT":
            print("   --lapic_pt \\", file=config)
    # windows
    if uos_type == "WINDOWS":
        print("   --windows \\", file=config)
    # pm_channel set
    if dm['pm_channel'][vmid] and dm['pm_channel'][vmid] != None:
        pm_key = dm['pm_channel'][vmid]
        pm_vuart = "--pm_notify_channel uart"
        if vmid in dm["allow_trigger_s5"] and dm["allow_trigger_s5"][vmid] == 'y':
            pm_vuart = pm_vuart + ",allow_trigger_s5 "
        else:
            pm_vuart = pm_vuart + " "
        if pm_key == "vuart1(tty)":
            # vuart1(tty) requires the scenario file to route vuart1 to SOS_COM2
            vuart_base = launch_cfg_lib.get_vuart1_from_scenario(sos_vmid + vmid)
            if vuart_base == "INVALID_COM_BASE":
                err_key = "uos:id={}:poweroff_channel".format(vmid)
                launch_cfg_lib.ERR_LIST[err_key] = "vuart1 of VM{} in scenario file should select 'SOS_COM2_BASE'".format(sos_vmid + vmid)
                return
            scenario_cfg_lib.get_sos_vuart_settings()
            print("   {} \\".format(pm_vuart + launch_cfg_lib.PM_CHANNEL_DIC[pm_key] + scenario_cfg_lib.SOS_UART1_VALID_NUM), file=config)
        elif pm_key == "vuart1(pty)":
            print("   {} \\".format(pm_vuart + launch_cfg_lib.PM_CHANNEL_DIC[pm_key]), file=config)
        else:
            print("   {} \\".format(launch_cfg_lib.PM_CHANNEL_DIC[pm_key]), file=config)
    # set logger_setting for all VMs
    print("   $logger_setting \\", file=config)
    # XHCI args set
    xhci_args_set(dm, vmid, config)
    # VIRTIO args set
    virtio_args_set(dm, virt_io, vmid, config)
    # GVT args set
    gvt_arg_set(dm, vmid, uos_type, config)
    # vbootloader setting
    vboot_arg_set(dm, vmid, config)
    # pcpu-list args set
    pcpu_arg_set(dm, vmid, config)
    # shm regions args set
    shm_arg_set(dm, vmid, config)
    # ssram set
    ssram_enabled = 'n'
    try:
        ssram_enabled = common.get_hv_item_tag(common.SCENARIO_INFO_FILE, "FEATURES", "SSRAM", "SSRAM_ENABLED")
    except:
        # missing/old scenario schema: treat software SRAM as disabled
        pass
    if uos_type == "PREEMPT-RT LINUX" and ssram_enabled == 'y':
        print("   --ssram \\", file=config)
    # interrupt-storm monitor is needed once any device is passed through
    for value in sel.bdf.values():
        if value[vmid]:
            print("   $intr_storm_monitor \\", file=config)
            break
    if uos_type != "PREEMPT-RT LINUX":
        print("   -s 31:0,lpc \\", file=config)
    # redirect console
    if dm['vuart0'][vmid] == "Enable":
        print("   -l com1,stdio \\", file=config)
    if launch_cfg_lib.is_linux_like(uos_type) or uos_type in ("ANDROID", "ALIOS"):
        if board_name == "apl-mrb":
            # apl-mrb routes the IOC (vehicle bus controller) into the guest
            print("   -i /run/acrn/ioc_$vm_name,0x20 \\", file=config)
            print("   -l com2,/run/acrn/ioc_$vm_name \\", file=config)
        if not is_nuc_whl_linux(names, vmid):
            print("   -s {},wdt-i6300esb \\".format(launch_cfg_lib.virtual_dev_slot("wdt-i6300esb")), file=config)
    set_dm_pt(names, sel, vmid, config, dm)
    if dm['console_vuart'][vmid] == "Enable":
        print("   -s {},uart,vuart_idx:0 \\".format(launch_cfg_lib.virtual_dev_slot("console_vuart")), file=config)
    for vuart_id in dm["communication_vuarts"][vmid]:
        if not vuart_id:
            break
        print("   -s {},uart,vuart_idx:{} \\".format(
            launch_cfg_lib.virtual_dev_slot("communication_vuart_{}".format(vuart_id)), vuart_id), file=config)
    print("   $vm_name", file=config)
    print("}", file=config)
def gen(names, pt_sel, virt_io, dm, vmid, config):
    """Top-level generator: emit the complete launch script for VM *vmid*
    by sequencing the header, passthrough prep, acrn-dm arguments and tail.

    :param names: dictionary with 'uos_types' (per-VM) and 'board_name'
    :param pt_sel: passthrough device selection (bdf/slot maps)
    :param virt_io: per-VM virtio device settings
    :param dm: the dictionary of argument for acrn-dm
    :param vmid: ID of the vm
    :param config: file pointer of the launch script being generated
    """
    board_name = names['board_name']
    uos_type = names['uos_types'][vmid]
    # passthrough bdf/vpid dictionary
    pt.gen_pt_head(names, dm, pt_sel, vmid, config)
    # gen launch header
    launch_begin(names, virt_io, vmid, config)
    tap_uos_net(names, virt_io, vmid, config)
    # passthrough device
    pt.gen_pt(names, dm, pt_sel, vmid, config)
    wa_usage(uos_type, config)
    mem_size_set(dm, vmid, config)
    interrupt_storm(pt_sel, config)
    log_level_set(uos_type, config)
    # gen acrn-dm args
    dm_arg_set(names, pt_sel, virt_io, dm, vmid, config)
    # gen launch end
launch_end(names, dm, virt_io, vmid, config) | print("#threshold/s,probe-period(s),intr-inject-delay-time(ms),delay-duration(ms)", file=config)
print('intr_storm_monitor="--intr_monitor 10000,10,1,100"', file=config)
print("", file=config)
|
simple-driver.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2021 UT-Battelle, LLC and other Celeritas Developers.
# See the top-level COPYRIGHT file for details.
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""
"""
import json
import subprocess
from os import environ, path
from sys import exit, argv
try:
geometry_filename = argv[1]
hepmc3_filename = argv[2]
except (IndexError, TypeError):
print("usage: {} inp.gdml inp.hepmc3".format(argv[0]))
exit(2)
geant_exp_exe = environ.get('CELERITAS_GEANT_EXPORTER_EXE', './geant-exporter')
physics_filename = path.basename(geometry_filename) + ".root"
result_ge = subprocess.run([geant_exp_exe,
geometry_filename,
physics_filename])
if result_ge.returncode:
print("fatal: geant-exporter failed with error", result_ge.returncode)
exit(result_ge.returncode)
inp = {
'run': {
'geometry_filename': geometry_filename,
'physics_filename': physics_filename,
'hepmc3_filename': hepmc3_filename,
'seed': 12345,
'max_num_tracks': 128 * 32,
'max_steps': 128,
'storage_factor': 10
}
}
exe = environ.get('CELERITAS_DEMO_EXE', './demo-loop')
print("Input:")
with open(f'{exe}.inp.json', 'w') as f:
json.dump(inp, f, indent=1)
print(json.dumps(inp, indent=1))
print("Running", exe)
result = subprocess.run([exe, '-'],
input=json.dumps(inp).encode(),
stdout=subprocess.PIPE)
if result.returncode:
print("fatal: run failed with error", result.returncode)
exit(result.returncode)
print("Received {} bytes of data".format(len(result.stdout)))
out_text = result.stdout.decode()
# Filter out spurious HepMC3 output
out_text = out_text[out_text.find('\n{') + 1:]
try:
result = json.loads(out_text)
except json.decoder.JSONDecodeError as e:
print("error: expected a JSON object but got the following stdout:")
print(out_text) | exit(1)
print(json.dumps(result, indent=1))
with open(f'{exe}.out.json', 'w') as f:
json.dump(result, f) | print("fatal:", str(e)) |
extensions_builder_test.go | // Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package builder
import (
"context"
"fmt"
"testing"
"github.com/stretchr/testify/assert"
"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/component/componenttest"
"go.opentelemetry.io/collector/config"
"go.opentelemetry.io/collector/extension/extensionhelper"
)
func | (t *testing.T) {
errExtensionFactory := extensionhelper.NewFactory(
"err",
func() config.Extension {
cfg := config.NewExtensionSettings(config.NewComponentID("err"))
return &cfg
},
func(ctx context.Context, set component.ExtensionCreateSettings, extension config.Extension) (component.Extension, error) {
return nil, fmt.Errorf("cannot create \"err\" extension type")
},
)
errExtensionConfig := errExtensionFactory.CreateDefaultConfig()
badExtensionFactory := newBadExtensionFactory()
badExtensionCfg := badExtensionFactory.CreateDefaultConfig()
tests := []struct {
name string
factories component.Factories
config *config.Config
wantErrMsg string
}{
{
name: "extension_not_configured",
config: &config.Config{
Service: config.Service{
Extensions: []config.ComponentID{
config.NewComponentID("myextension"),
},
},
},
wantErrMsg: "extension \"myextension\" is not configured",
},
{
name: "missing_extension_factory",
config: &config.Config{
Extensions: map[config.ComponentID]config.Extension{
config.NewComponentID(errExtensionFactory.Type()): errExtensionConfig,
},
Service: config.Service{
Extensions: []config.ComponentID{
config.NewComponentID(errExtensionFactory.Type()),
},
},
},
wantErrMsg: "extension factory for type \"err\" is not configured",
},
{
name: "error_on_create_extension",
factories: component.Factories{
Extensions: map[config.Type]component.ExtensionFactory{
errExtensionFactory.Type(): errExtensionFactory,
},
},
config: &config.Config{
Extensions: map[config.ComponentID]config.Extension{
config.NewComponentID(errExtensionFactory.Type()): errExtensionConfig,
},
Service: config.Service{
Extensions: []config.ComponentID{
config.NewComponentID(errExtensionFactory.Type()),
},
},
},
wantErrMsg: "failed to create extension err: cannot create \"err\" extension type",
},
{
name: "bad_factory",
factories: component.Factories{
Extensions: map[config.Type]component.ExtensionFactory{
badExtensionFactory.Type(): badExtensionFactory,
},
},
config: &config.Config{
Extensions: map[config.ComponentID]config.Extension{
config.NewComponentID(badExtensionFactory.Type()): badExtensionCfg,
},
Service: config.Service{
Extensions: []config.ComponentID{
config.NewComponentID(badExtensionFactory.Type()),
},
},
},
wantErrMsg: "factory for bf produced a nil extension",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
ext, err := BuildExtensions(componenttest.NewNopTelemetrySettings(), component.NewDefaultBuildInfo(), tt.config, tt.factories.Extensions)
assert.Error(t, err)
assert.EqualError(t, err, tt.wantErrMsg)
assert.Equal(t, 0, len(ext))
})
}
}
| TestService_setupExtensions |
main.go | // Copyright 2019 Liquidata, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// This file incorporates work covered by the following copyright and
// permission notice:
//
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package main
import (
"bytes"
"context"
"encoding/binary"
"fmt"
"time"
flag "github.com/juju/gnuflag"
"github.com/dolthub/dolt/go/store/chunks"
"github.com/dolthub/dolt/go/store/d"
"github.com/dolthub/dolt/go/store/datas"
"github.com/dolthub/dolt/go/store/types"
"github.com/dolthub/dolt/go/store/util/profile"
)
var (
count = flag.Uint64("count", 100000, "number of elements")
blobSize = flag.Uint64("blobsize", 2<<24 /* 32MB */, "size of blob of create")
)
const numberSize = uint64(8)
const strPrefix = "i am a 32 bytes.....%12d"
const stringSize = uint64(32)
const structSize = uint64(64)
func main() {
profile.RegisterProfileFlags(flag.CommandLine)
flag.Parse(true)
buildCount := *count
insertCount := buildCount
defer profile.MaybeStartProfile().Stop()
collectionTypes := []string{"List", "Set", "Map"}
buildFns := []buildCollectionFn{buildList, buildSet, buildMap}
buildIncrFns := []buildCollectionFn{buildListIncrementally, buildSetIncrementally, buildMapIncrementally}
readFns := []readCollectionFn{readList, readSet, readMap}
elementTypes := []string{"numbers (8 B)", "strings (32 B)", "structs (64 B)"}
elementSizes := []uint64{numberSize, stringSize, structSize}
valueFns := []createValueFn{createNumber, createString, createStruct}
for i, colType := range collectionTypes {
fmt.Printf("Testing %s: \t\tbuild %d\t\t\tscan %d\t\t\tinsert %d\n", colType, buildCount, buildCount, insertCount)
for j, elementType := range elementTypes {
valueFn := valueFns[j]
// Build One-Time
storage := &chunks.MemoryStorage{}
db := datas.NewDatabase(storage.NewView())
ds, err := db.GetDataset(context.Background(), "test")
d.Chk.NoError(err)
t1 := time.Now()
col := buildFns[i](db, buildCount, valueFn)
ds, err = db.CommitValue(context.Background(), ds, col)
d.Chk.NoError(err)
buildDuration := time.Since(t1)
// Read
t1 = time.Now()
val, ok, err := ds.MaybeHeadValue()
d.Chk.NoError(err)
d.Chk.True(ok)
col = val.(types.Collection)
readFns[i](col)
readDuration := time.Since(t1)
// Build Incrementally
storage = &chunks.MemoryStorage{}
db = datas.NewDatabase(storage.NewView())
ds, err = db.GetDataset(context.Background(), "test")
d.Chk.NoError(err)
t1 = time.Now()
col = buildIncrFns[i](db, insertCount, valueFn)
ds, err = db.CommitValue(context.Background(), ds, col)
d.Chk.NoError(err)
incrDuration := time.Since(t1)
elementSize := elementSizes[j]
buildSize := elementSize * buildCount
incrSize := elementSize * insertCount
fmt.Printf("%s\t\t%s\t\t%s\t\t%s\n", elementType, rate(buildDuration, buildSize), rate(readDuration, buildSize), rate(incrDuration, incrSize))
}
fmt.Println()
}
fmt.Printf("Testing Blob: \t\tbuild %d MB\t\t\tscan %d MB\n", *blobSize/1000000, *blobSize/1000000)
storage := &chunks.MemoryStorage{}
db := datas.NewDatabase(storage.NewView())
ds, err := db.GetDataset(context.Background(), "test")
d.Chk.NoError(err)
blobBytes := makeBlobBytes(*blobSize)
t1 := time.Now()
blob, err := types.NewBlob(context.Background(), db, bytes.NewReader(blobBytes))
d.Chk.NoError(err)
_, err = db.CommitValue(context.Background(), ds, blob)
d.Chk.NoError(err)
buildDuration := time.Since(t1)
db = datas.NewDatabase(storage.NewView())
ds, err = db.GetDataset(context.Background(), "test")
d.Chk.NoError(err)
t1 = time.Now()
blobVal, ok, err := ds.MaybeHeadValue()
d.Chk.NoError(err)
d.Chk.True(ok)
blob = blobVal.(types.Blob)
buff := &bytes.Buffer{}
blob.Copy(context.Background(), buff)
outBytes := buff.Bytes()
readDuration := time.Since(t1)
d.PanicIfFalse(bytes.Equal(blobBytes, outBytes))
fmt.Printf("\t\t\t%s\t\t%s\n\n", rate(buildDuration, *blobSize), rate(readDuration, *blobSize))
}
func | (d time.Duration, size uint64) string {
return fmt.Sprintf("%d ms (%.2f MB/s)", uint64(d)/1000000, float64(size)*1000/float64(d))
}
type createValueFn func(i uint64) types.Value
type buildCollectionFn func(vrw types.ValueReadWriter, count uint64, createFn createValueFn) types.Collection
type readCollectionFn func(value types.Collection)
func makeBlobBytes(byteLength uint64) []byte {
buff := &bytes.Buffer{}
counter := uint64(0)
for uint64(buff.Len()) < byteLength {
err := binary.Write(buff, binary.BigEndian, counter)
d.Chk.NoError(err)
counter++
}
return buff.Bytes()
}
func createString(i uint64) types.Value {
return types.String(fmt.Sprintf("%s%d", strPrefix, i))
}
func createNumber(i uint64) types.Value {
return types.Float(i)
}
var structTemplate = types.MakeStructTemplate("S1", []string{"bool", "num", "str"})
func createStruct(i uint64) types.Value {
st, err := structTemplate.NewStruct(types.Format_7_18, []types.Value{
types.Bool(i%2 == 0), // "bool"
types.Float(i), // "num"
types.String(fmt.Sprintf("i am a 55 bytes............................%12d", i)), // "str"
})
d.Chk.NoError(err)
return st
}
func buildList(vrw types.ValueReadWriter, count uint64, createFn createValueFn) types.Collection {
values := make([]types.Value, count)
for i := uint64(0); i < count; i++ {
values[i] = createFn(i)
}
l, err := types.NewList(context.Background(), vrw, values...)
d.Chk.NoError(err)
return l
}
func buildListIncrementally(vrw types.ValueReadWriter, count uint64, createFn createValueFn) types.Collection {
l, err := types.NewList(context.Background(), vrw)
d.Chk.NoError(err)
le := l.Edit()
for i := uint64(0); i < count; i++ {
le.Append(createFn(i))
}
l, err = le.List(context.Background())
d.Chk.NoError(err)
return l
}
func readList(c types.Collection) {
_ = c.(types.List).IterAll(context.Background(), func(v types.Value, idx uint64) error {
return nil
})
}
func buildSet(vrw types.ValueReadWriter, count uint64, createFn createValueFn) types.Collection {
values := make([]types.Value, count)
for i := uint64(0); i < count; i++ {
values[i] = createFn(i)
}
s, err := types.NewSet(context.Background(), vrw, values...)
d.Chk.NoError(err)
return s
}
func buildSetIncrementally(vrw types.ValueReadWriter, count uint64, createFn createValueFn) types.Collection {
s, err := types.NewSet(context.Background(), vrw)
d.Chk.NoError(err)
se := s.Edit()
for i := uint64(0); i < count; i++ {
se.Insert(createFn(i))
}
s, err = se.Set(context.Background())
d.Chk.NoError(err)
return s
}
func readSet(c types.Collection) {
_ = c.(types.Set).IterAll(context.Background(), func(v types.Value) error {
return nil
})
}
func buildMap(vrw types.ValueReadWriter, count uint64, createFn createValueFn) types.Collection {
values := make([]types.Value, count*2)
for i := uint64(0); i < count*2; i++ {
values[i] = createFn(i)
}
m, err := types.NewMap(context.Background(), vrw, values...)
d.Chk.NoError(err)
return m
}
func buildMapIncrementally(vrw types.ValueReadWriter, count uint64, createFn createValueFn) types.Collection {
m, err := types.NewMap(context.Background(), vrw)
d.Chk.NoError(err)
me := m.Edit()
for i := uint64(0); i < count*2; i += 2 {
me.Set(createFn(i), createFn(i+1))
}
m, err = me.Map(context.Background())
d.Chk.NoError(err)
return m
}
func readMap(c types.Collection) {
_ = c.(types.Map).IterAll(context.Background(), func(k types.Value, v types.Value) error {
return nil
})
}
| rate |
setup.py | from setuptools import setup, find_packages
| setup(
name='spektral',
version='0.6.0',
packages=find_packages(),
install_requires=['tensorflow>=2.1.0',
'networkx',
'pandas',
'lxml',
'joblib',
'numpy',
'scipy',
'requests',
'scikit-learn'],
url='https://github.com/danielegrattarola/spektral',
license='MIT',
author='Daniele Grattarola',
author_email='[email protected]',
description='Graph Neural Networks with Keras and Tensorflow 2.',
long_description=long_description,
long_description_content_type="text/markdown",
classifiers=[
"Programming Language :: Python :: 3.5"
],
) | with open("README.md", "r") as fh:
long_description = fh.read()
|
ggcash_en.ts | <?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="en">
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+67"/>
<source>Right-click to edit address or label</source>
<translation>Right-click to edit address or label</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Create a new address</translation>
</message>
<message>
<location line="+3"/>
<source>&New</source>
<translation>&New</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copy the currently selected address to the system clipboard</translation>
</message>
<message>
<location line="+3"/>
<source>&Copy</source>
<translation>&Copy</translation>
</message>
<message>
<location line="+14"/>
<source>Delete the currently selected address from the list</source>
<translation>Delete the currently selected address from the list</translation>
</message>
<message>
<location line="+3"/>
<source>&Delete</source>
<translation>&Delete</translation>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation>Export the data in the current tab to a file</translation>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation>&Export</translation>
</message>
<message>
<location line="+20"/>
<source>C&lose</source>
<translation>C&lose</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+44"/>
<source>Choose the address to send coins to</source>
<translation>Choose the address to send coins to</translation>
</message>
<message>
<location line="+3"/>
<source>Choose the address to receive coins with</source>
<translation>Choose the address to receive coins with</translation>
</message>
<message>
<location line="+6"/>
<source>C&hoose</source>
<translation>C&hoose</translation>
</message>
<message>
<location line="+6"/>
<source>Sending addresses</source>
<translation>Sending addresses</translation>
</message>
<message>
<location line="+3"/>
<source>Receiving addresses</source>
<translation>Receiving addresses</translation>
</message>
<message>
<location line="+7"/>
<source>These are your GGCash addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>These are your GGCash addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>&Copy Address</source>
<translation>&Copy Address</translation>
</message>
<message>
<location line="+1"/>
<source>Copy &Label</source>
<translation>Copy &Label</translation>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation>&Edit</translation>
</message>
<message>
<location line="+182"/>
<source>Export Address List</source>
<translation>Export Address List</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Comma separated file (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Exporting Failed</source>
<translation>Exporting Failed</translation>
</message>
<message>
<location line="+1"/>
<source>There was an error trying to save the address list to %1. Please try again.</source>
<translation>There was an error trying to save the address list to %1. Please try again.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+199"/>
<source>Label</source>
<translation>Label</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Address</translation>
</message>
<message>
<location line="+33"/>
<source>(no label)</source>
<translation>(no label)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>Passphrase Dialog</translation>
</message>
<message>
<location line="+30"/>
<source>Enter passphrase</source>
<translation>Enter passphrase</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>New passphrase</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Repeat new passphrase</translation>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</translation>
</message>
<message>
<location line="+3"/>
<source>For anonymization, automint, and staking only</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+48"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>ten or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>ten or more random characters</b>, or <b>eight or more words</b>.</translation>
</message>
<message>
<location line="+3"/>
<source>Encrypt wallet</source>
<translation>Encrypt wallet</translation>
</message>
<message>
<location line="+5"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>This operation needs your wallet passphrase to unlock the wallet.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Unlock wallet</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>This operation needs your wallet passphrase to decrypt the wallet.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Decrypt wallet</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Change passphrase</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Enter the old and new passphrase to the wallet.</translation>
</message>
<message>
<location line="+52"/>
<source>Confirm wallet encryption</source>
<translation>Confirm wallet encryption</translation>
</message>
<message>
<location line="+9"/>
<source>GGCash will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your GGHs from being stolen by malware infecting your computer.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-8"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Are you sure you wish to encrypt your wallet?</translation>
</message>
<message>
<location line="+0"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR GGH</b>!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<location line="+45"/>
<source>Wallet encrypted</source>
<translation>Wallet encrypted</translation>
</message>
<message>
<location line="-39"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</translation>
</message>
<message>
<location line="+7"/>
<location line="+5"/>
<location line="+31"/>
<location line="+4"/>
<source>Wallet encryption failed</source>
<translation>Wallet encryption failed</translation>
</message>
<message>
<location line="-39"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</translation>
</message>
<message>
<location line="+5"/>
<location line="+35"/>
<source>The supplied passphrases do not match.</source>
<translation>The supplied passphrases do not match.</translation>
</message>
<message>
<location line="-26"/>
<source>Wallet unlock failed</source>
<translation>Wallet unlock failed</translation>
</message>
<message>
<location line="+1"/>
<location line="+8"/>
<location line="+13"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>The passphrase entered for the wallet decryption was incorrect.</translation>
</message>
<message>
<location line="-14"/>
<source>Wallet decryption failed</source>
<translation>Wallet decryption failed</translation>
</message>
<message>
<location line="+10"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>Wallet passphrase was successfully changed.</translation>
</message>
<message>
<location line="+43"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>Warning: The Caps Lock key is on!</translation>
</message>
</context>
<context>
<name>BanTableModel</name>
<message>
<location filename="../bantablemodel.cpp" line="+87"/>
<source>IP/Netmask</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>Banned Until</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>Bip38ToolDialog</name>
<message>
<location filename="../forms/bip38tooldialog.ui" line="+14"/>
<source>BIP 38 Tool</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>&BIP 38 Encrypt</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+30"/>
<location line="+400"/>
<source>Address:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-424"/>
<source>Enter a GGCash Address that you would like to encrypt using BIP 38. Enter a passphrase in the middle box. Press encrypt to compute the encrypted private key.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+31"/>
<source>The GGCash address to encrypt</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Choose previously used address</source>
<translation type="unfinished">Choose previously used address</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation type="unfinished">Alt+A</translation>
</message>
<message>
<location line="+7"/>
<location line="+220"/>
<source>Paste address from clipboard</source>
<translation type="unfinished">Paste address from clipboard</translation>
</message>
<message>
<location line="-210"/>
<location line="+220"/>
<source>Alt+P</source>
<translation type="unfinished">Alt+P</translation>
</message>
<message>
<location line="-200"/>
<location line="+220"/>
<source>Passphrase: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-193"/>
<location line="+149"/>
<source>Encrypted Key:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-127"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished">Copy the current signature to the system clipboard</translation>
</message>
<message>
<location line="+18"/>
<source>Encrypt the private key for this GGCash address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+17"/>
<location line="+171"/>
<source>Reset all fields</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-72"/>
<source>The encrypted private key</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+55"/>
<source>Decrypt the entered key using the passphrase</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-168"/>
<source>Encrypt &Key</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+17"/>
<location line="+171"/>
<source>Clear &All</source>
<translation type="unfinished">Clear &All</translation>
</message>
<message>
<location line="-112"/>
<source>&BIP 38 Decrypt</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>Enter the BIP 38 encrypted private key. Enter the passphrase in the middle box. Click Decrypt Key to compute the private key. After the key is decrypted, clicking 'Import Address' will add this private key to the wallet.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+89"/>
<source>Decrypt &Key</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+77"/>
<source>Decrypted Key:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>Import Address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../bip38tooldialog.cpp" line="+29"/>
<source>Click "Decrypt Key" to compute key</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+89"/>
<source>The entered passphrase is invalid. </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>Allowed: 0-9,a-z,A-Z,</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>The entered address is invalid.</source>
<translation type="unfinished">The entered address is invalid.</translation>
</message>
<message>
<location line="+0"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished">Please check the address and try again.</translation>
</message>
<message>
<location line="+0"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished">The entered address does not refer to a key.</translation>
</message>
<message>
<location line="+7"/>
<location line="+63"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished">Wallet unlock was cancelled.</translation>
</message>
<message>
<location line="-56"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished">Private key for the entered address is not available.</translation>
</message>
<message>
<location line="+39"/>
<source>Failed to decrypt.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>Please check the key and passphrase and try again.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+26"/>
<source>Data Not Valid.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>Please try again.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Please wait while key is imported</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+8"/>
<source>Key Already Held By Wallet</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+8"/>
<source>Error Adding Key To Wallet</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Successfully Added Private Key To Wallet</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+123"/>
<source>Wallet</source>
<translation>Wallet</translation>
</message>
<message>
<location line="+2"/>
<source>Node</source>
<translation>Node</translation>
</message>
<message>
<location line="+174"/>
<source>&Overview</source>
<translation>&Overview</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Show general overview of wallet</translation>
</message>
<message>
<location line="+10"/>
<source>&Send</source>
<translation>&Send</translation>
</message>
<message>
<location line="+11"/>
<source>&Receive</source>
<translation>&Receive</translation>
</message>
<message>
<location line="+11"/>
<source>&Transactions</source>
<translation>&Transactions</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Browse transaction history</translation>
</message>
<message>
<location line="+11"/>
<source>Privacy Actions for zGGH</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+28"/>
<source>&Governance</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Show Proposals</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+25"/>
<source>E&xit</source>
<translation>E&xit</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Quit application</translation>
</message>
<message>
<location line="+6"/>
<source>About &Qt</source>
<translation>About &Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Show information about Qt</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Options...</translation>
</message>
<message>
<location line="+3"/>
<source>&Show / Hide</source>
<translation>&Show / Hide</translation>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation>Show or hide the main Window</translation>
</message>
<message>
<location line="+2"/>
<source>&Encrypt Wallet...</source>
<translation>&Encrypt Wallet...</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Encrypt the private keys that belong to your wallet</translation>
</message>
<message>
<location line="+2"/>
<source>&Backup Wallet...</source>
<translation>&Backup Wallet...</translation>
</message>
<message>
<location line="+1"/>
<source>Backup wallet to another location</source>
<translation>Backup wallet to another location</translation>
</message>
<message>
<location line="+1"/>
<source>&Change Passphrase...</source>
<translation>&Change Passphrase...</translation>
</message>
<message>
<location line="+1"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Change the passphrase used for wallet encryption</translation>
</message>
<message>
<location line="+1"/>
<source>&Unlock Wallet...</source>
<translation>&Unlock Wallet...</translation>
</message>
<message>
<location line="+1"/>
<source>Unlock wallet</source>
<translation>Unlock wallet</translation>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation>&Lock Wallet</translation>
</message>
<message>
<location line="+1"/>
<source>Sign &message...</source>
<translation>Sign &message...</translation>
</message>
<message>
<location line="+2"/>
<source>&Verify message...</source>
<translation>&Verify message...</translation>
</message>
<message>
<location line="+8"/>
<source>&Information</source>
<translation>&Information</translation>
</message>
<message>
<location line="+1"/>
<source>Show diagnostic information</source>
<translation>Show diagnostic information</translation>
</message>
<message>
<location line="+1"/>
<source>&Debug console</source>
<translation>&Debug console</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging console</source>
<translation>Open debugging console</translation>
</message>
<message>
<location line="+1"/>
<source>&Network Monitor</source>
<translation>&Network Monitor</translation>
</message>
<message>
<location line="+1"/>
<source>Show network monitor</source>
<translation>Show network monitor</translation>
</message>
<message>
<location line="+1"/>
<source>&Peers list</source>
<translation>&Peers list</translation>
</message>
<message>
<location line="+1"/>
<source>Show peers info</source>
<translation>Show peers info</translation>
</message>
<message>
<location line="+1"/>
<source>Wallet &Repair</source>
<translation>Wallet &Repair</translation>
</message>
<message>
<location line="+1"/>
<source>Show wallet repair options</source>
<translation>Show wallet repair options</translation>
</message>
<message>
<location line="+2"/>
<source>Open configuration file</source>
<translation>Open configuration file</translation>
</message>
<message>
<location line="+3"/>
<source>Show Automatic &Backups</source>
<translation>Show Automatic &Backups</translation>
</message>
<message>
<location line="+1"/>
<source>Show automatically created wallet backups</source>
<translation>Show automatically created wallet backups</translation>
</message>
<message>
<location line="+2"/>
<source>&Sending addresses...</source>
<translation>&Sending addresses...</translation>
</message>
<message>
<location line="+1"/>
<source>Show the list of used sending addresses and labels</source>
<translation>Show the list of used sending addresses and labels</translation>
</message>
<message>
<location line="+1"/>
<source>&Receiving addresses...</source>
<translation>&Receiving addresses...</translation>
</message>
<message>
<location line="+1"/>
<source>Show the list of used receiving addresses and labels</source>
<translation>Show the list of used receiving addresses and labels</translation>
</message>
<message>
<location line="+2"/>
<source>&Multisignature creation...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Create a new multisignature address and add it to this wallet</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>&Multisignature spending...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Spend from a multisignature address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>&Multisignature signing...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Sign with a multisignature address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Open &URI...</source>
<translation>Open &URI...</translation>
</message>
<message>
<location line="+5"/>
<source>&Command-line options</source>
<translation>&Command-line options</translation>
</message>
<message numerus="yes">
<location line="+490"/>
<source>Processed %n blocks of transaction history.</source>
<translation>
<numerusform>Processed %n block of transaction history.</numerusform>
<numerusform>Processed %n blocks of transaction history.</numerusform>
</translation>
</message>
<message>
<location line="+32"/>
<source>Synchronizing additional data: %p%</source>
<translation>Synchronizing additional data: %p%</translation>
</message>
<message>
<location line="+27"/>
<source>%1 behind. Scanning block %2</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+242"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b> for anonymization and staking only</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+30"/>
<source>Tor is <b>enabled</b>: %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-779"/>
<source>&File</source>
<translation>&File</translation>
</message>
<message>
<location line="+17"/>
<source>&Settings</source>
<translation>&Settings</translation>
</message>
<message>
<location line="+13"/>
<source>&Tools</source>
<translation>&Tools</translation>
</message>
<message>
<location line="+13"/>
<source>&Help</source>
<translation>&Help</translation>
</message>
<message>
<location line="+10"/>
<source>Tabs toolbar</source>
<translation>Tabs toolbar</translation>
</message>
<message>
<location line="-448"/>
<location line="+938"/>
<source>GGCash Core</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-742"/>
<source>Send coins to a GGCash address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+11"/>
<source>Request payments (generates QR codes and ggcash: URIs)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+21"/>
<source>&Privacy</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+15"/>
<source>&Masternodes</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Browse masternodes</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+43"/>
<source>&About GGCash Core</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Show information about GGCash Core</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>Modify configuration options for GGCash</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>Sign messages with your GGCash addresses to prove you own them</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified GGCash addresses</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>&BIP38 tool</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt and decrypt private keys using a passphrase</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>&MultiSend</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>MultiSend Settings</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>Open Wallet &Configuration File</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Open &Masternode Configuration File</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Open Masternode configuration file</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+17"/>
<source>Open a GGCash: URI or payment request</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>&Blockchain explorer</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Block explorer window</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Show the GGCash Core help message to get a list with possible GGCash command-line options</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+234"/>
<source>GGCash Core client</source>
<translation type="unfinished"></translation>
</message>
<message numerus="yes">
<location line="+219"/>
<source>%n active connection(s) to GGCash network</source>
<translation type="unfinished">
<numerusform></numerusform>
<numerusform></numerusform>
</translation>
</message>
<message>
<location line="+15"/>
<source>Synchronizing with network...</source>
<translation>Synchronizing with network...</translation>
</message>
<message>
<location line="+3"/>
<source>Importing blocks from disk...</source>
<translation>Importing blocks from disk...</translation>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation>Reindexing blocks on disk...</translation>
</message>
<message>
<location line="+4"/>
<source>No block source available...</source>
<translation>No block source available...</translation>
</message>
<message>
<location line="+16"/>
<source>Up to date</source>
<translation>Up to date</translation>
</message>
<message numerus="yes">
<location line="+41"/>
<source>%n hour(s)</source>
<translation>
<numerusform>%n hour</numerusform>
<numerusform>%n hours</numerusform>
</translation>
</message>
<message numerus="yes">
<location line="+2"/>
<source>%n day(s)</source>
<translation>
<numerusform>%n day</numerusform>
<numerusform>%n days</numerusform>
</translation>
</message>
<message numerus="yes">
<location line="+2"/>
<location line="+4"/>
<source>%n week(s)</source>
<translation>
<numerusform>%n week</numerusform>
<numerusform>%n weeks</numerusform>
</translation>
</message>
<message>
<location line="+0"/>
<source>%1 and %2</source>
<translation>%1 and %2</translation>
</message>
<message numerus="yes">
<location line="+0"/>
<source>%n year(s)</source>
<translation>
<numerusform>%n year</numerusform>
<numerusform>%n years</numerusform>
</translation>
</message>
<message>
<location line="+9"/>
<source>Catching up...</source>
<translation>Catching up...</translation>
</message>
<message>
<location line="+16"/>
<source>Last received block was generated %1 ago.</source>
<translation>Last received block was generated %1 ago.</translation>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation>Transactions after this will not yet be visible.</translation>
</message>
<message>
<location line="+26"/>
<source>Error</source>
<translation>Error</translation>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation>Warning</translation>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation>Information</translation>
</message>
<message>
<location line="+69"/>
<source>Sent transaction</source>
<translation>Sent transaction</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>Incoming transaction</translation>
</message>
<message>
<location line="+0"/>
<source>Sent MultiSend transaction</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Date: %1
Amount: %2
Type: %3
Address: %4
</translation>
</message>
<message>
<location line="+54"/>
<source>Staking is active
MultiSend: %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<location line="+6"/>
<source>Active</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-6"/>
<location line="+6"/>
<source>Not Active</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>Staking is not active
MultiSend: %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+12"/>
<source>AutoMint is currently enabled and set to </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>AutoMint is disabled</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+30"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Wallet is <b>encrypted</b> and currently <b>unlocked</b></translation>
</message>
<message>
<location line="+20"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Wallet is <b>encrypted</b> and currently <b>locked</b></translation>
</message>
<message>
<location filename="../ggcash.cpp" line="+478"/>
<source>A fatal error occurred. GGCash can no longer continue safely and will quit.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>BlockExplorer</name>
<message>
<location filename="../forms/blockexplorer.ui" line="+14"/>
<source>Blockchain Explorer</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+18"/>
<source>Back</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>Forward</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+29"/>
<source>Address / Block / Transaction</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+25"/>
<source>Search</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+39"/>
<source>TextLabel</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../blockexplorer.cpp" line="+480"/>
<source>Not all transactions will be shown. To view all transactions you need to set txindex=1 in the configuration file (ggcash.conf).</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+81"/>
<source>Total: %1 (IPv4: %2 / IPv6: %3 / Tor: %4 / Unknown: %5)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+101"/>
<source>Network Alert</source>
<translation>Network Alert</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+48"/>
<source>Quantity:</source>
<translation>Quantity:</translation>
</message>
<message>
<location line="+29"/>
<source>Bytes:</source>
<translation>Bytes:</translation>
</message>
<message>
<location line="+45"/>
<source>Amount:</source>
<translation>Amount:</translation>
</message>
<message>
<location line="+29"/>
<source>Priority:</source>
<translation>Priority:</translation>
</message>
<message>
<location line="+45"/>
<source>Fee:</source>
<translation>Fee:</translation>
</message>
<message>
<location line="-182"/>
<source>Coin Selection</source>
<translation>Coin Selection</translation>
</message>
<message>
<location line="+214"/>
<source>Dust:</source>
<translation>Dust:</translation>
</message>
<message>
<location line="+48"/>
<source>After Fee:</source>
<translation>After Fee:</translation>
</message>
<message>
<location line="+32"/>
<source>Change:</source>
<translation>Change:</translation>
</message>
<message>
<location line="+56"/>
<source>(un)select all</source>
<translation>(un)select all</translation>
</message>
<message>
<location line="+16"/>
<source>toggle lock state</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>Tree mode</source>
<translation>Tree mode</translation>
</message>
<message>
<location line="+13"/>
<source>List mode</source>
<translation>List mode</translation>
</message>
<message>
<location line="+10"/>
<source>(1 locked)</source>
<translation>(1 locked)</translation>
</message>
<message>
<location line="+46"/>
<source>Amount</source>
<translation>Amount</translation>
</message>
<message>
<location line="+5"/>
<source>Received with label</source>
<translation>Received with label</translation>
</message>
<message>
<location line="+5"/>
<source>Received with address</source>
<translation>Received with address</translation>
</message>
<message>
<location line="+5"/>
<source>Type</source>
<translation type="unfinished">Type</translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation>Date</translation>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation>Confirmations</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation>Confirmed</translation>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation>Priority</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+60"/>
<source>Copy address</source>
<translation>Copy address</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Copy label</translation>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation>Copy amount</translation>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation>Copy transaction ID</translation>
</message>
<message>
<location line="+1"/>
<source>Lock unspent</source>
<translation>Lock unspent</translation>
</message>
<message>
<location line="+1"/>
<source>Unlock unspent</source>
<translation>Unlock unspent</translation>
</message>
<message>
<location line="+22"/>
<source>Copy quantity</source>
<translation>Copy quantity</translation>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation>Copy fee</translation>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation>Copy after fee</translation>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation>Copy bytes</translation>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation>Copy priority</translation>
</message>
<message>
<location line="+1"/>
<source>Copy dust</source>
<translation>Copy dust</translation>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation>Copy change</translation>
</message>
<message>
<location line="+147"/>
<source>Please switch to "List mode" to use this function.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+216"/>
<source>highest</source>
<translation>highest</translation>
</message>
<message>
<location line="+2"/>
<source>higher</source>
<translation>higher</translation>
</message>
<message>
<location line="+2"/>
<source>high</source>
<translation>high</translation>
</message>
<message>
<location line="+2"/>
<source>medium-high</source>
<translation>medium-high</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="-334"/>
<location filename="../coincontroldialog.cpp" line="+2"/>
<source>medium</source>
<translation>medium</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+2"/>
<source>low-medium</source>
<translation>low-medium</translation>
</message>
<message>
<location line="+2"/>
<source>low</source>
<translation>low</translation>
</message>
<message>
<location line="+2"/>
<source>lower</source>
<translation>lower</translation>
</message>
<message>
<location line="+2"/>
<source>lowest</source>
<translation>lowest</translation>
</message>
<message>
<location line="+9"/>
<source>(%1 locked)</source>
<translation>(%1 locked)</translation>
</message>
<message>
<location line="+62"/>
<source>none</source>
<translation>none</translation>
</message>
<message>
<location line="+126"/>
<source>yes</source>
<translation>yes</translation>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+80"/>
<location filename="../coincontroldialog.cpp" line="+0"/>
<source>no</source>
<translation>no</translation>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+15"/>
<source>This label turns red, if the transaction size is greater than 1000 bytes.</source>
<translation>This label turns red, if the transaction size is greater than 1000 bytes.</translation>
</message>
<message>
<location line="+1"/>
<location line="+5"/>
<source>This means a fee of at least %1 per kB is required.</source>
<translation>This means a fee of at least %1 per kB is required.</translation>
</message>
<message>
<location line="-4"/>
<source>Can vary +/- 1 byte per input.</source>
<translation>Can vary +/- 1 byte per input.</translation>
</message>
<message>
<location line="+2"/>
<source>Transactions with higher priority are more likely to get included into a block.</source>
<translation>Transactions with higher priority are more likely to get included into a block.</translation>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the priority is smaller than "medium".</source>
<translation>This label turns red, if the priority is smaller than "medium".</translation>
</message>
<message>
<location line="+3"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.</source>
<translation>This label turns red, if any recipient receives an amount smaller than %1.</translation>
</message>
<message>
<location line="+8"/>
<source>Can vary +/- %1 uggh per input.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+46"/>
<location line="+81"/>
<source>(no label)</source>
<translation>(no label)</translation>
</message>
<message>
<location line="-5"/>
<source>change from %1 (%2)</source>
<translation>change from %1 (%2)</translation>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation>(change)</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Edit Address</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Label</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address list entry</source>
<translation>The label associated with this address list entry</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Address</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address list entry. This can only be modified for sending addresses.</source>
<translation>The address associated with this address list entry. This can only be modified for sending addresses.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+28"/>
<source>New receiving address</source>
<translation>New receiving address</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>New sending address</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Edit receiving address</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Edit sending address</translation>
</message>
<message>
<location line="+67"/>
<source>The entered address "%1" is not a valid GGCash address.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>The entered address "%1" is already in the address book.</translation>
</message>
<message>
<location line="+5"/>
<source>Could not unlock wallet.</source>
<translation>Could not unlock wallet.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>New key generation failed.</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<location filename="../intro.cpp" line="+70"/>
<source>A new data directory will be created.</source>
<translation>A new data directory will be created.</translation>
</message>
<message>
<location line="+19"/>
<source>name</source>
<translation>name</translation>
</message>
<message>
<location line="+2"/>
<source>Directory already exists. Add %1 if you intend to create a new directory here.</source>
<translation>Directory already exists. Add %1 if you intend to create a new directory here.</translation>
</message>
<message>
<location line="+3"/>
<source>Path already exists, and is not a directory.</source>
<translation>Path already exists, and is not a directory.</translation>
</message>
<message>
<location line="+6"/>
<source>Cannot create data directory here.</source>
<translation>Cannot create data directory here.</translation>
</message>
</context>
<context>
<name>GovernancePage</name>
<message>
<location filename="../forms/governancepage.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished">Form</translation>
</message>
<message>
<location line="+41"/>
<source>GOVERNANCE</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+141"/>
<source>Update Proposals</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+33"/>
<source>Next super block:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<location line="+14"/>
<location line="+14"/>
<location line="+14"/>
<location line="+14"/>
<location line="+14"/>
<source>0</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-63"/>
<source>Blocks to next super block:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>Days to budget payout (estimate):</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>Allotted budget:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>Budget left:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>Masternodes count:</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<location filename="../utilitydialog.cpp" line="+37"/>
<source>version</source>
<translation>version</translation>
</message>
<message>
<location line="+0"/>
<source>GGCash Core</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<location line="+2"/>
<source>(%1-bit)</source>
<translation>(%1-bit)</translation>
</message>
<message>
<location line="+4"/>
<source>About GGCash Core</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+20"/>
<source>Command-line options</source>
<translation>Command-line options</translation>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation>Usage:</translation>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation>command-line options</translation>
</message>
<message>
<location line="+8"/>
<source>UI Options:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Choose data directory on startup (default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Show splash screen on startup (default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-3"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Set language, for example "de_DE" (default: system locale)</translation>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation>Start minimized</translation>
</message>
<message>
<location line="+1"/>
<source>Set SSL root certificates for payment request (default: -system-)</source>
<translation>Set SSL root certificates for payment request (default: -system-)</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<location filename="../forms/intro.ui" line="+14"/>
<source>Welcome</source>
<translation>Welcome</translation>
</message>
<message>
<location line="+9"/>
<source>Welcome to GGCash Core.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+26"/>
<source>As this is the first time the program is launched, you can choose where GGCash Core will store its data.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>GGCash Core will download and store a copy of the GGCash block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Use the default data directory</source>
<translation>Use the default data directory</translation>
</message>
<message>
<location line="+7"/>
<source>Use a custom data directory:</source>
<translation>Use a custom data directory:</translation>
</message>
<message>
<location filename="../intro.cpp" line="+77"/>
<source>GGCash Core</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Error: Specified data directory "%1" cannot be created.</source>
<translation>Error: Specified data directory "%1" cannot be created.</translation>
</message>
<message>
<location line="+24"/>
<source>Error</source>
<translation>Error</translation>
</message>
<message>
<location line="+8"/>
<source>%1 GB of free space available</source>
<translation>%1 GB of free space available</translation>
</message>
<message>
<location line="+2"/>
<source>(of %1 GB needed)</source>
<translation>(of %1 GB needed)</translation>
</message>
</context>
<context>
<name>MasternodeList</name>
<message>
<location filename="../forms/masternodelist.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished">Form</translation>
</message>
<message>
<location line="+41"/>
<source>MASTERNODES</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+83"/>
<source>Note: Status of your masternodes in local wallet can potentially be slightly incorrect.<br />Always wait for wallet to sync additional data and then double check from another node<br />if your node should be running but you still see "MISSING" in "Status" field.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+71"/>
<source>Alias</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation type="unfinished">Address</translation>
</message>
<message>
<location line="+5"/>
<source>Protocol</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Status</source>
<translation type="unfinished">Status</translation>
</message>
<message>
<location line="+5"/>
<source>Active</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Last Seen (UTC)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Pubkey</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>S&tart alias</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Start &all</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Start &MISSING</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>&Update status</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Status will be updated automatically in (sec):</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>0</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../masternodelist.cpp" line="+52"/>
<source>Start alias</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+189"/>
<source>Confirm masternode start</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to start masternode %1?</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+23"/>
<source>Confirm all masternodes start</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to start ALL masternodes?</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+23"/>
<source>Command is not available right now</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>You can't use this command until masternode list is synced</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>Confirm missing masternodes start</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to start MISSING masternodes?</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>MultiSendDialog</name>
<message>
<location filename="../forms/multisenddialog.ui" line="+17"/>
<source>MultiSend</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+87"/>
<source>Enter whole numbers 1 - 100</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Enter % to Give (1-100)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+30"/>
<source>Enter Address to Send to</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-106"/>
<source>MultiSend allows you to automatically send up to 100% of your stake or masternode reward to a list of other GGCash addresses after it matures.
To Add: enter percentage to give and GGCash address to add to the MultiSend vector.
To Delete: Enter address to delete and press delete.
MultiSend will not be activated unless you have clicked Activate</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+207"/>
<source>Add to MultiSend Vector</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Add</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+34"/>
<source>Deactivate MultiSend</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Deactivate</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-128"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation type="unfinished">Alt+A</translation>
</message>
<message>
<location line="-66"/>
<source>Percentage of stake to send</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Percentage:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+27"/>
<source>Address to send portion of stake to</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Address:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+52"/>
<source>Label:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Enter a label for this address to add it to your address book</source>
<translation type="unfinished">Enter a label for this address to add it to your address book</translation>
</message>
<message>
<location line="+65"/>
<source>Delete Address From MultiSend Vector</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Delete</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+11"/>
<source>Activate MultiSend</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Activate</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-47"/>
<source>View MultiSend Vector</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>View MultiSend</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-154"/>
<source>Send For Stakes</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Send For Masternode Rewards</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../multisenddialog.cpp" line="+64"/>
<source>(no label)</source>
<translation type="unfinished">(no label)</translation>
</message>
<message>
<location line="+11"/>
<source>MultiSend Active for Stakes and Masternode Rewards</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>MultiSend Active for Stakes</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>MultiSend Active for Masternode Rewards</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>MultiSend Not Active</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+27"/>
<source>The entered address: %1 is invalid.
Please check the address and try again.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+11"/>
<source>The total amount of your MultiSend vector is over 100% of your stake reward</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+40"/>
<source>Saved the MultiSend to memory, but failed saving properties to the database.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>MultiSend Vector</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+21"/>
<source>Removed %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Could not locate address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+9"/>
<source>Unable to activate MultiSend, check MultiSend vector</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Need to select to send on stake and/or masternode rewards</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>MultiSend activated but writing settings to DB failed</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>MultiSend activated</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>First Address Not Valid</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>MultiSend deactivated but writing settings to DB failed</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>MultiSend deactivated</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-97"/>
<source>Please Enter 1 - 100 for percent.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>MultisigDialog</name>
<message>
<location filename="../forms/multisigdialog.ui" line="+20"/>
<source>Multisignature Address Interactions</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+19"/>
<source>Create MultiSignature &Address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>How many people must sign to verify a transaction</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>Enter the minimum number of signatures required to sign transactions</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+11"/>
<source>Address Label:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>Add another address that could sign to verify a transaction from the multisig address.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>&Add Address / Key</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+20"/>
<source>Local addresses or public keys that can sign:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+71"/>
<source>Create a new multisig address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>C&reate</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<location line="+345"/>
<location line="+179"/>
<source>Status:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-487"/>
        <!-- NOTE(review): "by its redeem" appears to be missing the word "script" ("by its redeem script"). As with the other flagged strings, this <source> key must match the application source exactly; correct the wording upstream before changing this entry. -->
        <source>Use below to quickly import an address by its redeem. Don&apos;t forget to add a label before clicking import!
Keep in mind, the wallet will rescan the blockchain to find transactions containing the new address.
Please be patient after clicking import.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+23"/>
<source>&Import Redeem</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+24"/>
<source>&Create MultiSignature Tx</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+18"/>
<source>Inputs:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>Coin Control</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+12"/>
<source>Quantity Selected:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<location line="+14"/>
<source>0</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-7"/>
<source>Amount:</source>
<translation type="unfinished">Amount:</translation>
</message>
<message>
<location line="+22"/>
<source>Add an input to fund the outputs</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Add a Raw Input</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+56"/>
<source>Address / Amount:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>Add destinations to send GGH to</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Add &Destination</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+81"/>
<source>Create a transaction object using the given inputs to the given outputs</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Cr&eate</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+76"/>
<source>&Sign MultiSignature Tx</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+8"/>
<source>Transaction Hex:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+34"/>
<source>Sign the transaction from this wallet or from provided private keys</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>S&ign</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+26"/>
<source><html><head/><body><p>DISABLED until transaction has been signed enough times.</p></body></html></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>Co&mmit</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+22"/>
<source>Add private keys to sign the transaction with</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Add Private &Key</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+11"/>
        <!-- NOTE(review): "Recommened" is a typo for "Recommended", inherited from the C++/UI source string. The <source> text is a lookup key and must stay byte-identical to the code; correct the typo in the multisig dialog source first, then regenerate this file with lupdate. -->
        <source>Sign with only private keys (Not Recommened)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../multisigdialog.cpp" line="+299"/>
<source>Invalid Tx Hash.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+8"/>
<source>Vout position must be positive.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+535"/>
<source>Maximum possible addresses reached. (15)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+110"/>
<source>Vout Position: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+57"/>
<source>Amount: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+26"/>
<source>Maximum (15)</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<location filename="../forms/openuridialog.ui" line="+14"/>
<source>Open URI</source>
<translation>Open URI</translation>
</message>
<message>
<location line="+6"/>
<source>Open payment request from URI or file</source>
<translation>Open payment request from URI or file</translation>
</message>
<message>
<location line="+9"/>
<source>URI:</source>
<translation>URI:</translation>
</message>
<message>
<location line="+10"/>
<source>Select payment request file</source>
<translation>Select payment request file</translation>
</message>
<message>
<location filename="../openuridialog.cpp" line="+45"/>
<source>Select payment request file to open</source>
<translation>Select payment request file to open</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Options</translation>
</message>
<message>
<location line="+13"/>
<source>&Main</source>
<translation>&Main</translation>
</message>
<message>
<location line="+18"/>
<source>Size of &database cache</source>
<translation>Size of &database cache</translation>
</message> | </message>
<message>
<location line="+27"/>
<source>Number of script &verification threads</source>
<translation>Number of script &verification threads</translation>
</message>
<message>
<location line="+13"/>
<source>(0 = auto, <0 = leave that many cores free)</source>
<translation>(0 = auto, <0 = leave that many cores free)</translation>
</message>
<message>
<location line="+141"/>
<source>W&allet</source>
<translation>W&allet</translation>
</message>
<message>
<location line="+53"/>
<source>If you disable the spending of unconfirmed change, the change from a transaction<br/>cannot be used until that transaction has at least one confirmation.<br/>This also affects how your balance is computed.</source>
<translation>If you disable the spending of unconfirmed change, the change from a transaction<br/>cannot be used until that transaction has at least one confirmation.<br/>This also affects how your balance is computed.</translation>
</message>
<message>
<location line="+33"/>
<source>Automatically open the GGCash client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Accept connections from outside</source>
<translation>Accept connections from outside</translation>
</message>
<message>
<location line="+3"/>
<source>Allow incoming connections</source>
<translation>Allow incoming connections</translation>
</message>
<message>
<location line="+10"/>
<source>&Connect through SOCKS5 proxy (default proxy):</source>
<translation>&Connect through SOCKS5 proxy (default proxy):</translation>
</message>
<message>
<location line="-82"/>
<source>Expert</source>
<translation>Expert</translation>
</message>
<message>
<location line="-236"/>
<source>Automatically start GGCash after logging in to the system.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>&Start GGCash on system login</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+239"/>
<source>Whether to show coin control features or not.</source>
<translation>Whether to show coin control features or not.</translation>
</message>
<message>
<location line="+3"/>
<source>Enable coin &control features</source>
<translation>Enable coin &control features</translation>
</message>
<message>
<location line="+7"/>
<source>Show additional tab listing all your masternodes in first sub-tab<br/>and all masternodes on the network in second sub-tab.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Show Masternodes Tab</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>&Spend unconfirmed change</source>
<translation>&Spend unconfirmed change</translation>
</message>
<message>
<location line="+24"/>
<source>&Network</source>
<translation>&Network</translation>
</message>
<message>
<location line="+192"/>
<source>The user interface language can be set here. This setting will take effect after restarting GGCash.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+21"/>
<source>Language missing or translation incomplete? Help contributing translations here:
https://www.transifex.com/ggcash-project/ggcash-project-translations</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-204"/>
<source>Map port using &UPnP</source>
<translation>Map port using &UPnP</translation>
</message>
<message>
<location line="-187"/>
<source>Enable automatic minting of GGH units to zGGH</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>Enable zGGH Automint</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Enable automatic zGGH minting from specific addresses</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>Enable Automint Addresses</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+11"/>
<source>Percentage of incoming GGH which get automatically converted to zGGH via Zerocoin Protocol (min: 10%)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Percentage of autominted zGGH</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+30"/>
<location line="+16"/>
<source>Wait with automatic conversion to Zerocoin until enough GGH for this denomination is available</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-13"/>
<source>Preferred Automint zGGH Denomination</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+40"/>
<source>Stake split threshold:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+98"/>
<source>Connect to the GGCash network through a SOCKS5 proxy.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+12"/>
<source>Proxy &IP:</source>
<translation>Proxy &IP:</translation>
</message>
<message>
<location line="+25"/>
<source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source>
<translation>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</translation>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>&Port:</translation>
</message>
<message>
<location line="+25"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Port of the proxy (e.g. 9050)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>&Window</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Show only a tray icon after minimizing the window.</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimize to the tray instead of the taskbar</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>M&inimize on close</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>&Display</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>User Interface &language:</translation>
</message>
<message>
<location line="+53"/>
<source>User Interface Theme:</source>
<translation>User Interface Theme:</translation>
</message>
<message>
<location line="+27"/>
<source>Unit to show amounts in:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Choose the default subdivision unit to show in the interface and when sending coins.</translation>
</message>
<message>
<location line="+11"/>
<source>Decimal digits</source>
<translation>Decimal digits</translation>
</message>
<message>
<location line="+14"/>
<location line="+6"/>
<source>Hide empty balances</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Hide orphan stakes in transaction lists</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Hide orphan stakes</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+24"/>
<location line="+10"/>
<source>Third party URLs (e.g. a block explorer) that appear in the transactions tab as context menu items. %s in the URL is replaced by transaction hash. Multiple URLs are separated by vertical bar |.</source>
<translation>Third party URLs (e.g. a block explorer) that appear in the transactions tab as context menu items. %s in the URL is replaced by transaction hash. Multiple URLs are separated by vertical bar |.</translation>
</message>
<message>
<location line="-7"/>
<source>Third party transaction URLs</source>
<translation>Third party transaction URLs</translation>
</message>
<message>
<location line="+41"/>
<source>Active command-line options that override above options:</source>
<translation>Active command-line options that override above options:</translation>
</message>
<message>
<location line="+43"/>
<source>Reset all client options to default.</source>
<translation>Reset all client options to default.</translation>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation>&Reset Options</translation>
</message>
<message>
<location line="+61"/>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<location line="+13"/>
<source>&Cancel</source>
<translation>&Cancel</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+92"/>
<source>Any</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+23"/>
<source>default</source>
<translation>default</translation>
</message>
<message>
<location line="+46"/>
<source>none</source>
<translation>none</translation>
</message>
<message>
<location line="+98"/>
<source>Confirm options reset</source>
<translation>Confirm options reset</translation>
</message>
<message>
<location line="+1"/>
<location line="+29"/>
<source>Client restart required to activate changes.</source>
<translation>Client restart required to activate changes.</translation>
</message>
<message>
<location line="-29"/>
<source>Client will be shutdown, do you want to proceed?</source>
<translation>Client will be shutdown, do you want to proceed?</translation>
</message>
<message>
<location line="+31"/>
<source>This change would require a client restart.</source>
<translation>This change would require a client restart.</translation>
</message>
<message>
<location line="+28"/>
<source>The supplied proxy address is invalid.</source>
<translation>The supplied proxy address is invalid.</translation>
</message>
<message>
<location line="+7"/>
<source>The supplied proxy port is invalid.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+8"/>
<source>The supplied proxy settings are invalid.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+20"/>
<source>Form</source>
<translation>Form</translation>
</message>
<message>
<location line="+270"/>
<location line="+192"/>
<location line="+403"/>
<source>Available:</source>
<translation>Available:</translation>
</message>
<message>
<location line="-579"/>
<location line="+192"/>
<source>Your current spendable balance</source>
<translation>Your current spendable balance</translation>
</message>
<message>
<location line="-160"/>
<source>Total Balance, including all unavailable coins.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+71"/>
<source>GGH Balance</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+130"/>
<source>Pending:</source>
<translation>Pending:</translation>
</message>
<message>
<location line="+16"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source>
<translation>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</translation>
</message>
<message>
<location line="+41"/>
<location line="+351"/>
<source>Immature:</source>
<translation>Immature:</translation>
</message>
<message>
<location line="-335"/>
<source>Staked or masternode rewards that has not yet matured</source>
<translation>Staked or masternode rewards that has not yet matured</translation>
</message>
<message>
<location line="+85"/>
<source>Current locked balance in watch-only addresses</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<location line="+19"/>
<source>Your current GGCash balance, unconfirmed and immature transactions included</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+96"/>
<source>zGGH Balance</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+53"/>
<location line="+17"/>
<source>Mature: more than 20 confirmation and more than 1 mint of the same denomination after it was minted.
These zGGH are spendable.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<location line="+17"/>
<location line="+14"/>
<location line="+17"/>
<source>Unconfirmed: less than 20 confirmations
Immature: confirmed, but less than 1 mint of the same denomination after it was minted</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-725"/>
<location line="+852"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the GGCash network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-990"/>
<source>OVERVIEW</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+125"/>
<source>Combined Balance (including unconfirmed and immature coins)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Combined Balance</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+355"/>
<source>Unconfirmed transactions to watch-only addresses</source>
<translation>Unconfirmed transactions to watch-only addresses</translation>
</message>
<message>
<location line="+57"/>
<source>Staked or masternode rewards in watch-only addresses that has not yet matured</source>
<translation>Staked or masternode rewards in watch-only addresses that has not yet matured</translation>
</message>
<message>
<location line="-315"/>
<location line="+394"/>
<location line="+261"/>
<source>Total:</source>
<translation>Total:</translation>
</message>
<message>
<location line="-220"/>
<source>Current total balance in watch-only addresses</source>
<translation>Current total balance in watch-only addresses</translation>
</message>
<message>
<location line="-285"/>
<source>Watch-only:</source>
<translation>Watch-only:</translation>
</message>
<message>
<location line="+51"/>
<source>Your current balance in watch-only addresses</source>
<translation>Your current balance in watch-only addresses</translation>
</message>
<message>
<location line="-61"/>
<source>Spendable:</source>
<translation>Spendable:</translation>
</message>
<message>
<location line="+191"/>
<location line="+19"/>
<source>Locked GGH or Masternode collaterals. These are excluded from zGGH minting.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-16"/>
<source>Locked:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+260"/>
<source>Unconfirmed:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+58"/>
<location line="+19"/>
<source>Your current zGGH balance, unconfirmed and immature zGGH included.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+84"/>
<source>Recent transactions</source>
<translation>Recent transactions</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+131"/>
<location line="+1"/>
<source>out of sync</source>
<translation>out of sync</translation>
</message>
<message>
<location line="+111"/>
<source>Current percentage of zGGH.
If AutoMint is enabled this percentage will settle around the configured AutoMint percentage (default = 10%).
</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>AutoMint is currently enabled and set to </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>To disable AutoMint add 'enablezeromint=0' in ggcash.conf.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>AutoMint is currently disabled.
To enable AutoMint change 'enablezeromint=0' to 'enablezeromint=1' in ggcash.conf</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+291"/>
<location line="+205"/>
<location line="+33"/>
<location line="+104"/>
<location line="+12"/>
<location line="+14"/>
<source>Payment request error</source>
<translation>Payment request error</translation>
</message>
<message>
<location line="-275"/>
<location line="+12"/>
<location line="+5"/>
<source>URI handling</source>
<translation>URI handling</translation>
</message>
<message>
<location line="-16"/>
<source>Payment request fetch URL is invalid: %1</source>
<translation>Payment request fetch URL is invalid: %1</translation>
</message>
<message>
<location line="+29"/>
<source>Payment request file handling</source>
<translation>Payment request file handling</translation>
</message>
<message>
<location line="-18"/>
<source>Invalid payment address %1</source>
<translation>Invalid payment address %1</translation>
</message>
<message>
<location line="-104"/>
<source>Cannot start ggcash: click-to-pay handler</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+110"/>
<source>URI cannot be parsed! This can be caused by an invalid GGCash address or malformed URI parameters.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+13"/>
<source>Payment request file cannot be read! This can be caused by an invalid payment request file.</source>
<translation>Payment request file cannot be read! This can be caused by an invalid payment request file.</translation>
</message>
<message>
<location line="+67"/>
<location line="+8"/>
<location line="+30"/>
<source>Payment request rejected</source>
<translation>Payment request rejected</translation>
</message>
<message>
<location line="-38"/>
<source>Payment request network doesn't match client network.</source>
<translation>Payment request network doesn't match client network.</translation>
</message>
<message>
<location line="+8"/>
<source>Payment request has expired.</source>
<translation>Payment request has expired.</translation>
</message>
<message>
<location line="+6"/>
<source>Payment request is not initialized.</source>
<translation>Payment request is not initialized.</translation>
</message>
<message>
<location line="+25"/>
<source>Unverified payment requests to custom payment scripts are unsupported.</source>
<translation>Unverified payment requests to custom payment scripts are unsupported.</translation>
</message>
<message>
<location line="+8"/>
<source>Requested payment amount of %1 is too small (considered dust).</source>
<translation>Requested payment amount of %1 is too small (considered dust).</translation>
</message>
<message>
<location line="+48"/>
<source>Refund from %1</source>
<translation>Refund from %1</translation>
</message>
<message>
<location line="+40"/>
<source>Payment request %1 is too large (%2 bytes, allowed %3 bytes).</source>
<translation>Payment request %1 is too large (%2 bytes, allowed %3 bytes).</translation>
</message>
<message>
<location line="+6"/>
<source>Payment request DoS protection</source>
<translation>Payment request DoS protection</translation>
</message>
<message>
<location line="+5"/>
<source>Error communicating with %1: %2</source>
<translation>Error communicating with %1: %2</translation>
</message>
<message>
<location line="+18"/>
<source>Payment request cannot be parsed!</source>
<translation>Payment request cannot be parsed!</translation>
</message>
<message>
<location line="+9"/>
<source>Bad response from server %1</source>
<translation>Bad response from server %1</translation>
</message>
<message>
<location line="+20"/>
<source>Network request error</source>
<translation>Network request error</translation>
</message>
<message>
<location line="+11"/>
<source>Payment acknowledged</source>
<translation>Payment acknowledged</translation>
</message>
</context>
<context>
<name>PeerTableModel</name>
<message>
<location filename="../peertablemodel.cpp" line="+114"/>
<source>Address/Hostname</source>
<translation>Address/Hostname</translation>
</message>
<message>
<location line="+0"/>
<source>Version</source>
<translation>Version</translation>
</message>
<message>
<location line="+0"/>
<source>Ping Time</source>
<translation>Ping Time</translation>
</message>
</context>
<context>
<name>PrivacyDialog</name>
<message>
<location filename="../forms/privacydialog.ui" line="+157"/>
<source>Zerocoin Actions:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the GGCash network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../privacydialog.cpp" line="+280"/>
<source>Mint Zerocoin</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../forms/privacydialog.ui" line="+373"/>
<location line="+30"/>
<source>0</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+229"/>
<source>zGGH</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-392"/>
<source>Reset Zerocoin Wallet DB. Deletes transactions that did not make it into the blockchain.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Reset</source>
<translation type="unfinished">Reset</translation>
</message>
<message>
<location line="-19"/>
<source>Rescan the complete blockchain for Zerocoin mints and their meta-data.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>ReScan</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+30"/>
        <source>Status and/or Messages from the last Mint Action.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-360"/>
<source>PRIVACY</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+462"/>
<source>zGGH Control</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>zGGH Selected:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+30"/>
<source>Quantity Selected:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+37"/>
<location filename="../privacydialog.cpp" line="+546"/>
<source>Spend Zerocoin. Without 'Pay To:' address creates payments to yourself.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<location filename="../privacydialog.cpp" line="-497"/>
<location line="+4"/>
<location line="+12"/>
<location line="+112"/>
<source>Spend Zerocoin</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Available (mature and spendable) zGGH for spending</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Available Balance:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>Available (mature and spendable) zGGH for spending
zGGH are mature when they have more than 20 confirmations AND more than 2 mints of the same denomination after them were minted</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-367"/>
<location line="+49"/>
<location line="+323"/>
<location line="+370"/>
<location line="+544"/>
<location line="+31"/>
<location line="+31"/>
<location line="+31"/>
<location line="+31"/>
<location line="+31"/>
<location line="+31"/>
<location line="+31"/>
<location line="+31"/>
<source>0 zGGH</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-1126"/>
<source>Pay &To:</source>
<translation type="unfinished">Pay &To:</translation>
</message>
<message>
<location line="+27"/>
<source>The GGCash address to send the payment to. Creates local payment to yourself when empty.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Choose previously used address</source>
<translation type="unfinished">Choose previously used address</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation type="unfinished">Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation type="unfinished">Paste address from clipboard</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished">Alt+P</translation>
</message>
<message>
<location line="+15"/>
<source>&Label:</source>
<translation type="unfinished">&Label:</translation>
</message>
<message>
<location line="+13"/>
<source>Enter a label for this address to add it to the list of used addresses</source>
<translation type="unfinished">Enter a label for this address to add it to the list of used addresses</translation>
</message>
<message>
<location line="+13"/>
<source>A&mount:</source>
<translation type="unfinished">A&mount:</translation>
</message>
<message>
<location line="+63"/>
<source>Convert Change to Zerocoin (might cost additional fees)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>If checked, the wallet tries to minimize the returning change instead of minimizing the number of spent denominations.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Minimize Change</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+47"/>
<source>Information about the available Zerocoin funds.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Zerocoin Stats:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-661"/>
<location line="+25"/>
<location line="+704"/>
<location line="+38"/>
<source>Total Balance including unconfirmed and immature zGGH</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-764"/>
<location line="+729"/>
<source>Total Zerocoin Balance:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+52"/>
<source>Denominations with value 1:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Denom. with value 1:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+29"/>
<location line="+53"/>
<location line="+53"/>
<location line="+53"/>
<location line="+53"/>
<location line="+53"/>
<location line="+53"/>
<location line="+53"/>
<source>Unconfirmed: less than 20 confirmations
Immature: confirmed, but less than 1 mint of the same denomination after it was minted</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+40"/>
<source>Show the current status of automatic zGGH minting.
To change the status (restart required):
- enable: add 'enablezeromint=1' to ggcash.conf
- disable: add 'enablezeromint=0' to ggcash.conf
To change the percentage (no restart required):
- menu Settings->Options->Percentage of autominted zGGH
</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+12"/>
<source>AutoMint Status</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-1178"/>
<location line="+1227"/>
<source>Global Supply:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+37"/>
<source>Denom. 1:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+31"/>
<source>Denom. 5:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+31"/>
<source>Denom. 10:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+31"/>
<source>Denom. 50:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+31"/>
<source>Denom. 100:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+31"/>
<source>Denom. 500:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+31"/>
<source>Denom. 1000:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+31"/>
<source>Denom. 5000:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-722"/>
<location line="+53"/>
<location line="+53"/>
<location line="+53"/>
<location line="+53"/>
<location line="+53"/>
<location line="+53"/>
<location line="+53"/>
<source>0 x</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-1092"/>
<source>Show zGGH denominations list</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Show Denominations</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+29"/>
<source>zGGH minting is DISABLED</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+131"/>
<source>zGGH spending is NOT private (links back to the mint transaction)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+575"/>
<source>Denominations with value 5:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Denom. with value 5:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+50"/>
<source>Denominations with value 10:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Denom. with value 10:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+50"/>
<source>Denominations with value 50:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Denom. with value 50:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+50"/>
<source>Denominations with value 100:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Denom. with value 100:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+50"/>
<source>Denominations with value 500:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Denom. with value 500:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+50"/>
<source>Denominations with value 1000:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Denom. with value 1000:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+50"/>
<source>Denominations with value 5000:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Denom. with value 5000:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+443"/>
<source>Hide Denominations</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+138"/>
<source>Priority:</source>
<translation type="unfinished">Priority:</translation>
</message>
<message>
<location line="+9"/>
<source>TextLabel</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+35"/>
<source>Fee:</source>
<translation type="unfinished">Fee:</translation>
</message>
<message>
<location line="+38"/>
<source>Dust:</source>
<translation type="unfinished">Dust:</translation>
</message>
<message>
<location line="+16"/>
<source>no</source>
<translation type="unfinished">no</translation>
</message>
<message>
<location line="+21"/>
<source>Bytes:</source>
<translation type="unfinished">Bytes:</translation>
</message>
<message>
<location line="+54"/>
<source>Insufficient funds!</source>
<translation type="unfinished">Insufficient funds!</translation>
</message>
<message>
<location line="+75"/>
<source>Coins automatically selected</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+56"/>
<source>medium</source>
<translation type="unfinished">medium</translation>
</message>
<message>
<location line="+71"/>
<source>Coin Control Features</source>
<translation type="unfinished">Coin Control Features</translation>
</message>
<message>
<location line="+63"/>
<source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source>
<translation type="unfinished">If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</translation>
</message>
<message>
<location line="+3"/>
<source>Custom change address</source>
<translation type="unfinished">Custom change address</translation>
</message>
<message>
<location line="+83"/>
<source>Amount After Fee:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+40"/>
<source>Change:</source>
<translation type="unfinished">Change:</translation>
</message>
<message>
<location filename="../privacydialog.cpp" line="-413"/>
<source>out of sync</source>
<translation type="unfinished">out of sync</translation>
</message>
<message>
<location line="+6"/>
<source>Mint Status: Okay</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+195"/>
<source>Starting ResetMintZerocoin: rescanning complete blockchain, this will need up to 30 minutes depending on your hardware.
Please be patient...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+240"/>
<source>zGGH Spend #: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>zGGH Mint</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+294"/>
<source> <b>enabled</b>.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source> <b>disabled</b>.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source> Configured target percentage: <b></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+17"/>
<source>zGGH is currently disabled due to maintenance.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-537"/>
<source>zGGH is currently undergoing maintenance.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-215"/>
<source>Denom. with value <b>1</b>:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Denom. with value <b>5</b>:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Denom. with value <b>10</b>:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Denom. with value <b>50</b>:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Denom. with value <b>100</b>:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Denom. with value <b>500</b>:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Denom. with value <b>1000</b>:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Denom. with value <b>5000</b>:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<location line="+716"/>
<source>AutoMint Status:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-713"/>
<source>Denom. <b>1</b>:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Denom. <b>5</b>:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Denom. <b>10</b>:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Denom. <b>50</b>:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Denom. <b>100</b>:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Denom. <b>500</b>:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Denom. <b>1000</b>:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Denom. <b>5000</b>:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+165"/>
<location line="+15"/>
<location line="+241"/>
<source>Duration: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-256"/>
<location line="+15"/>
<location line="+241"/>
<source> sec.
</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-246"/>
<source>Starting ResetSpentZerocoin: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+68"/>
<source>No 'Pay To' address provided, creating local payment</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Invalid GGCash Address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+12"/>
<source>Invalid Send Amount</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+22"/>
<source>Confirm additional Fees</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+21"/>
<source>Are you sure you want to send?<br /><br /></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source> to address </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source> to a newly generated (unused and therefore anonymous) local address <br /></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>Confirm send coins</source>
<translation type="unfinished">Confirm send coins</translation>
</message>
<message>
<location line="+23"/>
<source>Failed to fetch mint associated with serial hash</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+35"/>
<source>Spend Zerocoin failed with status = </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+28"/>
<source>denomination: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-75"/>
<source>Spending Zerocoin.
Computationally expensive, might need several minutes depending on your hardware.
Please be patient...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+76"/>
<source>serial: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Spend is 1 of : </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>value out: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>address: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Sending successful, return code: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>txid: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>fee: </source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>ProposalFrame</name>
<message>
<location filename="../proposalframe.cpp" line="+92"/>
<source>Open proposal page in browser</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source> remaining payment(s).</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>Yes:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Abstain:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>No:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+42"/>
<source>A proposal URL can be used for phishing, scams and computer viruses. Open this link only if you trust the following URL.
</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Open link</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copy link</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+50"/>
<source>Wallet Locked</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>You must unlock your wallet to vote.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>Do you want to vote %1 on</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>using all your masternodes?</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Proposal Hash:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Proposal URL:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Confirm Vote</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+64"/>
<source>Vote Results</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<location filename="../bitcoinunits.cpp" line="+252"/>
<source>Amount</source>
<translation>Amount</translation>
</message>
<message>
<location filename="../guiutil.cpp" line="+105"/>
<source>Enter a GGCash address (e.g. %1)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+773"/>
<source>%1 d</source>
<translation>%1 d</translation>
</message>
<message>
<location line="+2"/>
<source>%1 h</source>
<translation>%1 h</translation>
</message>
<message>
<location line="+2"/>
<source>%1 m</source>
<translation>%1 m</translation>
</message>
<message>
<location line="+2"/>
<location line="+43"/>
<source>%1 s</source>
<translation>%1 s</translation>
</message>
<message>
<location line="-28"/>
<source>NETWORK</source>
<translation>NETWORK</translation>
</message>
<message>
<location line="+4"/>
<source>BLOOM</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>ZK_BLOOM</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>UNKNOWN</source>
<translation>UNKNOWN</translation>
</message>
<message>
<location line="+8"/>
<source>None</source>
<translation>None</translation>
</message>
<message>
<location line="+5"/>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<location line="+0"/>
<source>%1 ms</source>
<translation>%1 ms</translation>
</message>
<message>
<location filename="../ggcash.cpp" line="+75"/>
<location line="+7"/>
<location line="+13"/>
<location line="+19"/>
<source>GGCash Core</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-38"/>
<source>Error: Specified data directory "%1" does not exist.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Error: Cannot parse configuration file: %1. Only use key=value syntax.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+12"/>
<source>Error: Invalid combination of -regtest and -testnet.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+20"/>
<source>Error reading masternode configuration file: %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+40"/>
<source>GGCash Core didn't yet exit safely...</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>QRImageWidget</name>
<message>
<location filename="../receiverequestdialog.cpp" line="+33"/>
<source>&Save Image...</source>
<translation>&Save Image...</translation>
</message>
<message>
<location line="+3"/>
<source>&Copy Image</source>
<translation>&Copy Image</translation>
</message>
<message>
<location line="+31"/>
<source>Save QR Code</source>
<translation>Save QR Code</translation>
</message>
<message>
<location line="+0"/>
<source>PNG Image (*.png)</source>
<translation>PNG Image (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+14"/>
<source>Tools window</source>
<translation>Tools window</translation>
</message>
<message>
<location line="+10"/>
<source>&Information</source>
<translation>&Information</translation>
</message>
<message>
<location line="+15"/>
<source>General</source>
<translation>General</translation>
</message>
<message>
<location line="+164"/>
<source>Name</source>
<translation>Name</translation>
</message>
<message>
<location line="-157"/>
<source>Client name</source>
<translation>Client name</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+23"/>
<location line="+522"/>
<location line="+23"/>
<location line="+23"/>
<location line="+23"/>
<location line="+23"/>
<location line="+23"/>
<location line="+23"/>
<location line="+23"/>
<location line="+23"/>
<location line="+23"/>
<location line="+23"/>
<location line="+23"/>
<location line="+23"/>
<location line="+23"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+78"/>
<location line="+26"/>
<location line="+29"/>
<location line="+29"/>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<location line="-1147"/>
<source>Number of connections</source>
<translation>Number of connections</translation>
</message>
<message>
<location line="+134"/>
<source>&Open</source>
<translation>&Open</translation>
</message>
<message>
<location line="-193"/>
<source>Startup time</source>
<translation>Startup time</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Network</translation>
</message>
<message>
<location line="+112"/>
<source>Last block time</source>
<translation>Last block time</translation>
</message>
<message>
<location line="+42"/>
<source>Debug log file</source>
<translation>Debug log file</translation>
</message>
<message>
<location line="-258"/>
<source>Using OpenSSL version</source>
<translation>Using OpenSSL version</translation>
</message>
<message>
<location line="+52"/>
<source>Build date</source>
<translation>Build date</translation>
</message>
<message>
<location line="+141"/>
<source>Current number of blocks</source>
<translation>Current number of blocks</translation>
</message>
<message>
<location line="-216"/>
<source>Client version</source>
<translation>Client version</translation>
</message>
<message>
<location line="+49"/>
<source>Using BerkeleyDB version</source>
<translation>Using BerkeleyDB version</translation>
</message>
<message>
<location line="+160"/>
<source>Block chain</source>
<translation>Block chain</translation>
</message>
<message>
<location line="+79"/>
<source>Open the GGCash debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-108"/>
<source>Number of Masternodes</source>
<translation>Number of Masternodes</translation>
</message>
<message>
<location line="+122"/>
<source>&Console</source>
<translation>&Console</translation>
</message>
<message>
<location line="+49"/>
<source>Clear console</source>
<translation>Clear console</translation>
</message>
<message>
<location line="+23"/>
<source>&Network Traffic</source>
<translation>&Network Traffic</translation>
</message>
<message>
<location line="+52"/>
<source>&Clear</source>
<translation>&Clear</translation>
</message>
<message>
<location line="+16"/>
<source>Totals</source>
<translation>Totals</translation>
</message>
<message>
<location line="+64"/>
<source>Received</source>
<translation>Received</translation>
</message>
<message>
<location line="+80"/>
<source>Sent</source>
<translation>Sent</translation>
</message>
<message>
<location line="+41"/>
<source>&Peers</source>
<translation>&Peers</translation>
</message>
<message>
<location line="+50"/>
<source>Banned peers</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+59"/>
<location filename="../rpcconsole.cpp" line="+329"/>
<location line="+727"/>
<source>Select a peer to view detailed information.</source>
<translation>Select a peer to view detailed information.</translation>
</message>
<message>
<location line="+25"/>
<source>Whitelisted</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+23"/>
<source>Direction</source>
<translation>Direction</translation>
</message>
<message>
<location line="+23"/>
<source>Protocol</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+23"/>
<source>Version</source>
<translation>Version</translation>
</message>
<message>
<location line="+23"/>
<source>Services</source>
<translation>Services</translation>
</message>
<message>
<location line="+92"/>
<source>Ban Score</source>
<translation>Ban Score</translation>
</message>
<message>
<location line="+23"/>
<source>Connection Time</source>
<translation>Connection Time</translation>
</message>
<message>
<location line="+23"/>
<source>Last Send</source>
<translation>Last Send</translation>
</message>
<message>
<location line="+23"/>
<source>Last Receive</source>
<translation>Last Receive</translation>
</message>
<message>
<location line="+23"/>
<source>Bytes Sent</source>
<translation>Bytes Sent</translation>
</message>
<message>
<location line="+23"/>
<source>Bytes Received</source>
<translation>Bytes Received</translation>
</message>
<message>
<location line="+23"/>
<source>Ping Time</source>
<translation>Ping Time</translation>
</message>
<message>
<location line="+89"/>
<source>&Wallet Repair</source>
<translation>&Wallet Repair</translation>
</message>
<message>
<location line="+347"/>
<source>Delete local Blockchain Folders</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-309"/>
<source>Wallet In Use:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-334"/>
<source>Starting Block</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+23"/>
<source>Synced Headers</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+23"/>
<source>Synced Blocks</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+184"/>
<source>The duration of a currently outstanding ping.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Ping Wait</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+23"/>
<source>Time Offset</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+101"/>
<source>Custom Backup Path:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+29"/>
<source>Custom zGGH Backup Path:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+29"/>
<source>Custom Backups Threshold:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+48"/>
<source>Salvage wallet</source>
<translation>Salvage wallet</translation>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>Rescan blockchain files</source>
<translation>Rescan blockchain files</translation>
</message>
<message>
<location line="+14"/>
<source>Rescan the block chain for missing wallet transactions.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>Recover transactions 1</source>
<translation>Recover transactions 1</translation>
</message>
<message>
<location line="+14"/>
<source>Recover transactions from blockchain (keep meta-data, e.g. account owner).</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>Recover transactions 2</source>
<translation>Recover transactions 2</translation>
</message>
<message>
<location line="+14"/>
<source>Recover transactions from blockchain (drop meta-data).</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>Upgrade wallet format</source>
<translation>Upgrade wallet format</translation>
</message>
<message>
<location line="+44"/>
<source>Rebuild block chain index from current blk000??.dat files.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+23"/>
<source>-resync:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Deletes all local blockchain folders so the wallet synchronizes from scratch.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-333"/>
<source>The buttons below will restart the wallet with command-line options to repair the wallet, fix issues with corrupt blockhain files or missing/obsolete transactions.</source>
        <translation>The buttons below will restart the wallet with command-line options to repair the wallet, fix issues with corrupt blockchain files or missing/obsolete transactions.</translation>
</message>
<message>
<location line="-13"/>
<source>Wallet repair options.</source>
<translation>Wallet repair options.</translation>
</message>
<message>
<location line="+286"/>
<source>Upgrade wallet to latest format on startup. (Note: this is NOT an update of the wallet itself!)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>Rebuild index</source>
<translation>Rebuild index</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-376"/>
<source>In:</source>
<translation>In:</translation>
</message>
<message>
<location line="+1"/>
<source>Out:</source>
<translation>Out:</translation>
</message>
<message>
<location line="-39"/>
<source>Welcome to the GGCash RPC console.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-218"/>
<source>&Disconnect Node</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<location line="+1"/>
<location line="+1"/>
<location line="+1"/>
<source>Ban Node for</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-3"/>
<source>1 &hour</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>1 &day</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>1 &week</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>1 &year</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+46"/>
<source>&Unban Node</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+95"/>
<source>This will delete your local blockchain folders and the wallet will synchronize the complete Blockchain from scratch.<br /><br /></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>This needs quite some time and downloads a lot of data.<br /><br /></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Your transactions and funds will be visible again after the download has completed.<br /><br /></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Do you want to continue?.<br /></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Confirm resync Blockchain</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+70"/>
<source>Use up and down arrows to navigate history, and %1 to clear screen.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Type <b>help</b> for an overview of available commands.</translation>
</message>
<message>
<location line="+2"/>
<source>WARNING: Scammers have been active, telling users to type commands here, stealing their wallet contents. Do not use this console without fully understanding the ramifications of a command.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+141"/>
<source>%1 B</source>
<translation>%1 B</translation>
</message>
<message>
<location line="+2"/>
<source>%1 KB</source>
<translation>%1 KB</translation>
</message>
<message>
<location line="+2"/>
<source>%1 MB</source>
<translation>%1 MB</translation>
</message>
<message>
<location line="+2"/>
<source>%1 GB</source>
<translation>%1 GB</translation>
</message>
<message>
<location line="+123"/>
<source>(node id: %1)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>via %1</source>
<translation>via %1</translation>
</message>
<message>
<location line="+3"/>
<location line="+1"/>
<source>never</source>
<translation>never</translation>
</message>
<message>
<location line="+9"/>
<source>Inbound</source>
<translation>Inbound</translation>
</message>
<message>
<location line="+0"/>
<source>Outbound</source>
<translation>Outbound</translation>
</message>
<message>
<location line="+2"/>
<source>Yes</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+0"/>
<source>No</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+12"/>
<location line="+6"/>
<source>Unknown</source>
<translation>Unknown</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<location filename="../forms/receivecoinsdialog.ui" line="+245"/>
<source>Reuse one of the previously used receiving addresses.<br>Reusing addresses has security and privacy issues.<br>Do not use this unless re-generating a payment request made before.</source>
<translation>Reuse one of the previously used receiving addresses.<br>Reusing addresses has security and privacy issues.<br>Do not use this unless re-generating a payment request made before.</translation>
</message>
<message>
<location line="+3"/>
<source>R&euse an existing receiving address (not recommended)</source>
<translation>R&euse an existing receiving address (not recommended)</translation>
</message>
<message>
<location line="-30"/>
<source>&Message:</source>
<translation>&Message:</translation>
</message>
<message>
<location line="-87"/>
<location line="+16"/>
<source>An optional label to associate with the new receiving address.</source>
<translation>An optional label to associate with the new receiving address.</translation>
</message>
<message>
<location line="+7"/>
<location line="+25"/>
<source>Your receiving address. You can copy and use it to receive coins on this wallet. A new one will be generated once it is used.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-22"/>
<source>&Address:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+32"/>
<source>A&mount:</source>
<translation type="unfinished">A&mount:</translation>
</message>
<message>
<location line="+26"/>
<source>An optional message to attach to the payment request, which will be displayed when the request is opened. Note: The message will not be sent with the payment over the GGCash network.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-166"/>
<source>RECEIVE</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+182"/>
<source>An optional message to attach to the payment request, which will be displayed when the request is opened.<br>Note: The message will not be sent with the payment over the GGCash network.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-107"/>
<source>Use this form to request payments. All fields are <b>optional</b>.</source>
<translation>Use this form to request payments. All fields are <b>optional</b>.</translation>
</message>
<message>
<location line="+10"/>
<source>&Label:</source>
<translation>&Label:</translation>
</message>
<message>
<location line="+52"/>
<location line="+22"/>
<source>An optional amount to request. Leave this empty or zero to not request a specific amount.</source>
<translation>An optional amount to request. Leave this empty or zero to not request a specific amount.</translation>
</message>
<message>
<location line="+62"/>
<source>&Request payment</source>
        <translation>&amp;Request payment</translation>
</message>
<message>
<location line="+17"/>
<source>Clear all fields of the form.</source>
<translation>Clear all fields of the form.</translation>
</message>
<message>
<location line="+3"/>
<source>Clear</source>
<translation>Clear</translation>
</message>
<message>
<location line="+27"/>
<source>Receiving Addresses</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+55"/>
<source>Requested payments history</source>
<translation>Requested payments history</translation>
</message>
<message>
<location line="+62"/>
<source>Show the selected request (does the same as double clicking an entry)</source>
<translation>Show the selected request (does the same as double clicking an entry)</translation>
</message>
<message>
<location line="+3"/>
<source>Show</source>
<translation>Show</translation>
</message>
<message>
<location line="+17"/>
<source>Remove the selected entries from the list</source>
<translation>Remove the selected entries from the list</translation>
</message>
<message>
<location line="+3"/>
<source>Remove</source>
<translation>Remove</translation>
</message>
<message>
<location filename="../receivecoinsdialog.cpp" line="+41"/>
<source>Copy label</source>
<translation>Copy label</translation>
</message>
<message>
<location line="+1"/>
<source>Copy message</source>
<translation>Copy message</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Copy amount</translation>
</message>
<message>
<location line="+1"/>
<source>Copy address</source>
<translation type="unfinished">Copy address</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<location filename="../forms/receiverequestdialog.ui" line="+29"/>
<source>QR Code</source>
<translation>QR Code</translation>
</message>
<message>
<location line="+46"/>
<source>Copy &URI</source>
<translation>Copy &URI</translation>
</message>
<message>
<location line="+10"/>
<source>Copy &Address</source>
<translation>Copy &Address</translation>
</message>
<message>
<location line="+10"/>
<source>&Save Image...</source>
<translation>&Save Image...</translation>
</message>
<message>
<location filename="../receiverequestdialog.cpp" line="+63"/>
<source>Request payment to %1</source>
<translation>Request payment to %1</translation>
</message>
<message>
<location line="+6"/>
<source>Payment information</source>
<translation>Payment information</translation>
</message>
<message>
<location line="+1"/>
<source>URI</source>
<translation>URI</translation>
</message>
<message>
<location line="+2"/>
<source>Address</source>
<translation>Address</translation>
</message>
<message>
<location line="+2"/>
<source>Amount</source>
<translation>Amount</translation>
</message>
<message>
<location line="+2"/>
<source>Label</source>
<translation>Label</translation>
</message>
<message>
<location line="+2"/>
<source>Message</source>
<translation>Message</translation>
</message>
<message>
<location line="+8"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>Resulting URI too long, try to reduce the text for label / message.</translation>
</message>
<message>
<location line="+4"/>
<source>Error encoding URI into QR Code.</source>
<translation>Error encoding URI into QR Code.</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<location filename="../recentrequeststablemodel.cpp" line="+27"/>
<source>Date</source>
<translation>Date</translation>
</message>
<message>
<location line="+0"/>
<source>Label</source>
<translation>Label</translation>
</message>
<message>
<location line="+0"/>
<source>Message</source>
<translation>Message</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation type="unfinished">Address</translation>
</message>
<message>
<location line="+89"/>
<source>Amount</source>
<translation>Amount</translation>
</message>
<message>
<location line="-52"/>
<source>(no label)</source>
<translation>(no label)</translation>
</message>
<message>
<location line="+8"/>
<source>(no message)</source>
<translation>(no message)</translation>
</message>
<message>
<location line="+6"/>
<source>(no amount)</source>
<translation>(no amount)</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+17"/>
<location filename="../sendcoinsdialog.cpp" line="+234"/>
<location line="+25"/>
<location line="+413"/>
<source>Send Coins</source>
<translation>Send Coins</translation>
</message>
<message>
<location line="+47"/>
<source>SEND</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+147"/>
<source>Coin Control Features</source>
<translation>Coin Control Features</translation>
</message>
<message>
<location line="+70"/>
<source>Insufficient funds!</source>
<translation>Insufficient funds!</translation>
</message>
<message>
<location line="+80"/>
<source>Quantity:</source>
<translation>Quantity:</translation>
</message>
<message>
<location line="+35"/>
<source>Bytes:</source>
<translation>Bytes:</translation>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation>Amount:</translation>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation>Priority:</translation>
</message>
<message>
<location line="+13"/>
<source>medium</source>
<translation>medium</translation>
</message>
<message>
<location line="+35"/>
<source>Fee:</source>
<translation>Fee:</translation>
</message>
<message>
<location line="+32"/>
<source>Dust:</source>
<translation>Dust:</translation>
</message>
<message>
<location line="+13"/>
<source>no</source>
<translation>no</translation>
</message>
<message>
<location line="+35"/>
<source>After Fee:</source>
<translation>After Fee:</translation>
</message>
<message>
<location line="+32"/>
<source>Change:</source>
<translation>Change:</translation>
</message>
<message>
<location line="+62"/>
<source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source>
<translation>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</translation>
</message>
<message>
<location line="+3"/>
<source>Custom change address</source>
<translation>Custom change address</translation>
</message>
<message>
<location line="+76"/>
<source>Split UTXO</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+25"/>
<source># of outputs</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>UTXO Size:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>0 GGH</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+149"/>
<source>SwiftX technology allows for near instant transactions - A flat fee of 0.01 GGH applies</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+176"/>
<source>Transaction Fee:</source>
<translation>Transaction Fee:</translation>
</message>
<message>
<location line="+14"/>
<source>Choose...</source>
<translation>Choose...</translation>
</message>
<message>
<location line="+7"/>
<source>collapse fee-settings</source>
<translation>collapse fee-settings</translation>
</message>
<message>
<location line="+3"/>
<source>Minimize</source>
<translation>Minimize</translation>
</message>
<message>
<location line="+126"/>
<source>per kilobyte</source>
<translation>per kilobyte</translation>
</message>
<message>
<location line="+16"/>
<source>total at least</source>
<translation>total at least</translation>
</message>
<message>
<location line="+46"/>
<source>(read the tooltip)</source>
<translation>(read the tooltip)</translation>
</message>
<message>
<location line="-82"/>
<source>Custom:</source>
<translation>Custom:</translation>
</message>
<message>
<location line="-202"/>
<source>(Smart fee not initialized yet. This usually takes a few blocks...)</source>
<translation>(Smart fee not initialized yet. This usually takes a few blocks...)</translation>
</message>
<message>
<location line="-101"/>
<source>SwiftX</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Confirmation time:</source>
<translation>Confirmation time:</translation>
</message>
<message>
<location line="-723"/>
<source>Open Coin Control...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+10"/>
<source>Coins automatically selected</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1026"/>
<source>If the custom fee is set to 1000 uGGHs and the transaction is only 250 bytes, then "per kilobyte" only pays 250 uGGHs in fee,<br />while "at least" pays 1000 uGGHs. For transactions bigger than a kilobyte both pay by kilobyte.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>If the custom fee is set to 1000 uGGHs and the transaction is only 250 bytes, then "per kilobyte" only pays 250 uGGHs in fee,<br />while "total at least" pays 1000 uGGHs. For transactions bigger than a kilobyte both pay by kilobyte.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+33"/>
<location line="+13"/>
<source>Paying only the minimum fee is just fine as long as there is less transaction volume than space in the blocks.<br />But be aware that this can end up in a never confirming transaction once there is more demand for GGCash transactions than the network can process.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-365"/>
<source>normal</source>
<translation>normal</translation>
</message>
<message>
<location line="+44"/>
<source>fast</source>
<translation>fast</translation>
</message>
<message>
<location line="+351"/>
<source>Recommended</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+115"/>
<source>Send as zero-fee transaction if possible</source>
<translation>Send as zero-fee transaction if possible</translation>
</message>
<message>
<location line="+7"/>
<source>(confirmation may take longer)</source>
<translation>(confirmation may take longer)</translation>
</message>
<message>
<location line="+71"/>
<source>Confirm the send action</source>
<translation>Confirm the send action</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>S&end</translation>
</message>
<message>
<location line="+23"/>
<source>Clear all fields of the form.</source>
<translation>Clear all fields of the form.</translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>Clear &All</translation>
</message>
<message>
<location line="+14"/>
<source>Send to multiple recipients at once</source>
<translation>Send to multiple recipients at once</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>Add &Recipient</translation>
</message>
<message>
<location line="+35"/>
<source>Anonymized GGH</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Balance:</source>
<translation>Balance:</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-590"/>
<source>Copy quantity</source>
<translation>Copy quantity</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Copy amount</translation>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation>Copy fee</translation>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation>Copy after fee</translation>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation>Copy bytes</translation>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation>Copy priority</translation>
</message>
<message>
<location line="+1"/>
<source>Copy dust</source>
<translation>Copy dust</translation>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation>Copy change</translation>
</message>
<message>
<location line="+146"/>
<source>The split block tool does not work when sending to outside addresses. Try again.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+25"/>
<source>The split block tool does not work with multiple addresses. Try again.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+653"/>
<source>Warning: Invalid GGCash address</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-615"/>
<location line="+4"/>
<location line="+4"/>
<location line="+3"/>
<source>%1 to %2</source>
<translation>%1 to %2</translation>
</message>
<message>
<location line="+51"/>
<source>Are you sure you want to send?</source>
<translation>Are you sure you want to send?</translation>
</message>
<message>
<location line="+8"/>
<source>are added as transaction fee</source>
<translation>are added as transaction fee</translation>
</message>
<message>
<location line="+18"/>
<source>Total Amount = <b>%1</b><br />= %2</source>
<translation>Total Amount = <b>%1</b><br />= %2</translation>
</message>
<message>
<location line="+19"/>
<source>Confirm send coins</source>
<translation>Confirm send coins</translation>
</message>
<message>
<location line="+247"/>
<source>A fee %1 times higher than %2 per kB is considered an insanely high fee.</source>
<translation>A fee %1 times higher than %2 per kB is considered an insanely high fee.</translation>
</message>
<message numerus="yes">
<location line="+121"/>
<source>Estimated to begin confirmation within %n block(s).</source>
<translation>
<numerusform>Estimated to begin confirmation within %n block.</numerusform>
<numerusform>Estimated to begin confirmation within %n blocks.</numerusform>
</translation>
</message>
<message>
<location line="-152"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>The recipient address is not valid, please recheck.</translation>
</message>
<message>
<location line="-346"/>
<source>using SwiftX</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+38"/>
<source> split into %1 outputs using the UTXO splitter.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+89"/>
<source><b>(%1 of %2 entries displayed)</b></source>
<translation><b>(%1 of %2 entries displayed)</b></translation>
</message>
<message>
<location line="+222"/>
<source>The amount to pay must be larger than 0.</source>
<translation>The amount to pay must be larger than 0.</translation>
</message>
<message>
<location line="+3"/>
<source>The amount exceeds your balance.</source>
<translation>The amount exceeds your balance.</translation>
</message>
<message>
<location line="+3"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>The total exceeds your balance when the %1 transaction fee is included.</translation>
</message>
<message>
<location line="+3"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Duplicate address found, can only send to each address once per send operation.</translation>
</message>
<message>
<location line="+3"/>
<source>Transaction creation failed!</source>
<translation>Transaction creation failed!</translation>
</message>
<message>
<location line="+4"/>
<source>The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</translation>
</message>
<message>
<location line="+8"/>
<source>Error: The wallet was unlocked only to anonymize coins.</source>
<translation>Error: The wallet was unlocked only to anonymize coins.</translation>
</message>
<message>
<location line="+16"/>
<source>Error: The wallet was unlocked only to anonymize coins. Unlock canceled.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+86"/>
<source>Pay only the minimum fee of %1</source>
<translation>Pay only the minimum fee of %1</translation>
</message>
<message>
<location line="+13"/>
<source>Estimated to get 6 confirmations near instantly with <b>SwiftX</b>!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+158"/>
<source>Warning: Unknown change address</source>
<translation>Warning: Unknown change address</translation>
</message>
<message>
<location line="+10"/>
<source>(no label)</source>
<translation>(no label)</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+21"/>
<source>This is a normal payment.</source>
<translation>This is a normal payment.</translation>
</message>
<message>
<location line="+15"/>
<source>Pay &To:</source>
<translation>Pay &To:</translation>
</message>
<message>
<location line="+18"/>
<source>The GGCash address to send the payment to</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>Choose previously used address</source>
<translation>Choose previously used address</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Paste address from clipboard</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<location line="+524"/>
<location line="+536"/>
<source>Remove this entry</source>
<translation>Remove this entry</translation>
</message>
<message>
<location line="-1044"/>
<source>&Label:</source>
<translation>&Label:</translation>
</message>
<message>
<location line="+13"/>
<source>Enter a label for this address to add it to the list of used addresses</source>
<translation>Enter a label for this address to add it to the list of used addresses</translation>
</message>
<message>
<location line="+7"/>
<location line="+521"/>
<location line="+536"/>
<source>A&mount:</source>
<translation>A&mount:</translation>
</message>
<message>
<location line="-1041"/>
<source>Message:</source>
<translation>Message:</translation>
</message>
<message>
<location line="+10"/>
<source>A message that was attached to the GGCash: URI which will be stored with the transaction for your reference. Note: This message will not be sent over the GGCash network.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+426"/>
<source>This is an unverified payment request.</source>
<translation>This is an unverified payment request.</translation>
</message>
<message>
<location line="+18"/>
<location line="+532"/>
<source>Pay To:</source>
<translation>Pay To:</translation>
</message>
<message>
<location line="-498"/>
<location line="+536"/>
<source>Memo:</source>
<translation>Memo:</translation>
</message>
<message>
<location line="-56"/>
<source>This is a verified payment request.</source>
<translation>This is a verified payment request.</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+30"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Enter a label for this address to add it to your address book</translation>
</message>
</context>
<context>
<name>ShutdownWindow</name>
<message>
<location filename="../utilitydialog.cpp" line="+75"/>
<source>GGCash Core is shutting down...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Do not shut down the computer until this window disappears.</source>
<translation>Do not shut down the computer until this window disappears.</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>Signatures - Sign / Verify a Message</translation>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation>&Sign Message</translation>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</translation>
</message>
<message>
<location line="+15"/>
<source>The GGCash address to sign the message with</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<location line="+195"/>
<source>Choose previously used address</source>
<translation>Choose previously used address</translation>
</message>
<message>
<location line="-185"/>
<location line="+195"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-188"/>
<source>Paste address from clipboard</source>
<translation>Paste address from clipboard</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+9"/>
<source>Enter the message you want to sign here</source>
<translation>Enter the message you want to sign here</translation>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation>Signature</translation>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation>Copy the current signature to the system clipboard</translation>
</message>
<message>
<location line="+18"/>
<source>Sign the message to prove you own this GGCash address</source>
        <translation>Sign the message to prove you own this GGCash address</translation>
</message>
<message>
<location line="+103"/>
<source>The GGCash address the message was signed with</source>
        <translation>The GGCash address the message was signed with</translation>
</message>
<message>
<location line="+34"/>
<source>Verify the message to ensure it was signed with the specified GGCash address</source>
        <translation>Verify the message to ensure it was signed with the specified GGCash address</translation>
</message>
<message>
<location line="-134"/>
<source>Sign &Message</source>
<translation>Sign &Message</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation>Reset all sign message fields</translation>
</message>
<message>
<location line="+3"/>
<location line="+137"/>
<source>Clear &All</source>
<translation>Clear &All</translation>
</message>
<message>
<location line="-78"/>
<source>&Verify Message</source>
<translation>&Verify Message</translation>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</translation>
</message>
<message>
<location line="+55"/>
<source>Verify &Message</source>
<translation>Verify &Message</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation>Reset all verify message fields</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+30"/>
<source>Click "Sign Message" to generate signature</source>
<translation>Click "Sign Message" to generate signature</translation>
</message>
<message>
<location line="+79"/>
<location line="+73"/>
<source>The entered address is invalid.</source>
<translation>The entered address is invalid.</translation>
</message>
<message>
<location line="-73"/>
<location line="+7"/>
<location line="+66"/>
<location line="+7"/>
<source>Please check the address and try again.</source>
<translation>Please check the address and try again.</translation>
</message>
<message>
<location line="-73"/>
<location line="+73"/>
<source>The entered address does not refer to a key.</source>
<translation>The entered address does not refer to a key.</translation>
</message>
<message>
<location line="-66"/>
<source>Wallet unlock was cancelled.</source>
<translation>Wallet unlock was cancelled.</translation>
</message>
<message>
<location line="+7"/>
<source>Private key for the entered address is not available.</source>
<translation>Private key for the entered address is not available.</translation>
</message>
<message>
<location line="+11"/>
<source>Message signing failed.</source>
<translation>Message signing failed.</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>Message signed.</translation>
</message>
<message>
<location line="+53"/>
<source>The signature could not be decoded.</source>
<translation>The signature could not be decoded.</translation>
</message>
<message>
<location line="+0"/>
<location line="+12"/>
<source>Please check the signature and try again.</source>
<translation>Please check the signature and try again.</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation>The signature did not match the message digest.</translation>
</message>
<message>
<location line="+6"/>
<source>Message verification failed.</source>
<translation>Message verification failed.</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>Message verified.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+36"/>
<source>GGCash Core</source>
        <translation>GGCash Core</translation>
</message>
<message>
<location line="+1"/>
<source>Version %1</source>
<translation>Version %1</translation>
</message>
<message>
<location line="+1"/>
<source>The Bitcoin Core developers</source>
<translation>The Bitcoin Core developers</translation>
</message>
<message>
<location line="+1"/>
<source>The Dash Core developers</source>
        <translation>The Dash Core developers</translation>
</message>
<message>
<location line="+1"/>
<source>The GGCash Core developers</source>
        <translation>The GGCash Core developers</translation>
</message>
<message>
<location filename="../networkstyle.cpp" line="+20"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
<message>
<location filename="../trafficgraphwidget.cpp" line="+79"/>
<source>KB/s</source>
<translation>KB/s</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message numerus="yes">
<location filename="../transactiondesc.cpp" line="+33"/>
<source>Open for %n more block(s)</source>
<translation>
<numerusform>Open for %n more block</numerusform>
<numerusform>Open for %n more blocks</numerusform>
</translation>
</message>
<message>
<location line="+2"/>
<source>Open until %1</source>
<translation>Open until %1</translation>
</message>
<message>
<location line="+8"/>
<location line="+11"/>
<location line="+10"/>
<location line="+12"/>
<source>conflicted</source>
<translation>conflicted</translation>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation>%1/offline</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/unconfirmed</translation>
</message>
<message>
<location line="-10"/>
<location line="+12"/>
<source>%1 confirmations</source>
<translation>%1 confirmations</translation>
</message>
<message>
<location line="-37"/>
<source>%1/offline (verified via SwiftX)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>%1/confirmed (verified via SwiftX)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations (verified via SwiftX)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>%1/offline (SwiftX verification in progress - %2 of %3 signatures)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>%1/confirmed (SwiftX verification in progress - %2 of %3 signatures )</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations (SwiftX verification in progress - %2 of %3 signatures)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline (SwiftX verification failed)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>%1/confirmed (SwiftX verification failed)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+29"/>
<source>Status</source>
<translation>Status</translation>
</message>
<message>
<location line="+4"/>
<source>, has not been successfully broadcast yet</source>
<translation>, has not been successfully broadcast yet</translation>
</message>
<message numerus="yes">
<location line="+2"/>
<source>, broadcast through %n node(s)</source>
<translation>
<numerusform>, broadcast through %n node</numerusform>
<numerusform>, broadcast through %n nodes</numerusform>
</translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Date</translation>
</message>
<message>
<location line="+6"/>
<source>Source</source>
<translation>Source</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>Generated</translation>
</message>
<message>
<location line="+3"/>
<location line="+8"/>
<location line="+63"/>
<source>From</source>
<translation>From</translation>
</message>
<message>
<location line="-63"/>
<source>unknown</source>
<translation>unknown</translation>
</message>
<message>
<location line="+1"/>
<location line="+19"/>
<location line="+58"/>
<source>To</source>
<translation>To</translation>
</message>
<message>
<location line="-75"/>
<source>own address</source>
<translation>own address</translation>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>watch-only</source>
<translation>watch-only</translation>
</message>
<message>
<location line="-58"/>
<source>label</source>
<translation>label</translation>
</message>
<message>
<location line="+32"/>
<location line="+10"/>
<location line="+45"/>
<location line="+23"/>
<location line="+50"/>
<source>Credit</source>
<translation>Credit</translation>
</message>
<message numerus="yes">
<location line="-126"/>
<source>matures in %n more block(s)</source>
<translation>
<numerusform>matures in %n more block</numerusform>
<numerusform>matures in %n more blocks</numerusform>
</translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>not accepted</translation>
</message>
<message>
<location line="+49"/>
<location line="+22"/>
<location line="+50"/>
<source>Debit</source>
<translation>Debit</translation>
</message>
<message>
<location line="-63"/>
<source>Total debit</source>
<translation>Total debit</translation>
</message>
<message>
<location line="+1"/>
<source>Total credit</source>
<translation>Total credit</translation>
</message>
<message>
<location line="+5"/>
<source>Transaction fee</source>
<translation>Transaction fee</translation>
</message>
<message>
<location line="+14"/>
<source>Net amount</source>
<translation>Net amount</translation>
</message>
<message>
<location line="+6"/>
<location line="+10"/>
<source>Message</source>
<translation>Message</translation>
</message>
<message>
<location line="-8"/>
<source>Comment</source>
<translation>Comment</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>Transaction ID</translation>
</message>
<message>
<location line="+1"/>
<source>Output index</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+16"/>
<source>Merchant</source>
<translation>Merchant</translation>
</message>
<message>
<location line="+6"/>
<source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</translation>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation>Debug information</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Transaction</translation>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation>Inputs</translation>
</message>
<message>
<location line="+17"/>
<source>Amount</source>
<translation>Amount</translation>
</message>
<message>
<location line="+1"/>
<location line="+1"/>
<source>true</source>
<translation>true</translation>
</message>
<message>
<location line="-1"/>
<location line="+1"/>
<source>false</source>
<translation>false</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Transaction details</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>This pane shows a detailed description of the transaction</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+215"/>
<source>Date</source>
<translation>Date</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Type</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Address</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation>
<numerusform>Open for %n more block</numerusform>
<numerusform>Open for %n more blocks</numerusform>
</translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>Open until %1</translation>
</message>
<message>
<location line="+3"/>
<source>Offline</source>
<translation>Offline</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation>Unconfirmed</translation>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation>Confirming (%1 of %2 recommended confirmations)</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Confirmed (%1 confirmations)</translation>
</message>
<message>
<location line="+3"/>
<source>Conflicted</source>
<translation>Conflicted</translation>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation>Immature (%1 confirmations, will be available after %2)</translation>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>This block was not received by any other nodes and will probably not be accepted!</translation>
</message>
<message>
<location line="+38"/>
<source>Received with</source>
<translation>Received with</translation>
</message>
<message>
<location line="+2"/>
<source>Masternode Reward</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Received from</translation>
</message>
<message>
<location line="+2"/>
<source>Received via Obfuscation</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<source>GGH Stake</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>zGGH Stake</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Obfuscation Denominate</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Obfuscation Collateral Payment</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Obfuscation Make Collateral Inputs</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Obfuscation Create Denominations</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Converted GGH to zGGH</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Spent zGGH</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Received GGH from zGGH</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Minted Change as zGGH from zGGH Spend</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Converted zGGH to GGH</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+56"/>
<source>Anonymous (zGGH Transaction)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Anonymous (zGGH Stake)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-86"/>
<source>Sent to</source>
<translation>Sent to</translation>
</message>
<message>
<location line="-44"/>
<source>Orphan Block - Generated but not accepted. This does not impact your holdings.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+46"/>
<source>Payment to yourself</source>
<translation>Payment to yourself</translation>
</message>
<message>
<location line="+6"/>
<source>Mined</source>
<translation>Mined</translation>
</message>
<message>
<location line="+10"/>
<source>Obfuscated</source>
<translation>Obfuscated</translation>
</message>
<message>
<location line="+44"/>
<source>watch-only</source>
<translation>watch-only</translation>
</message>
<message>
<location line="+27"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+211"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Transaction status. Hover over this field to show number of confirmations.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Date and time that the transaction was received.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Type of transaction.</translation>
</message>
<message>
<location line="+2"/>
<source>Whether or not a watch-only address is involved in this transaction.</source>
<translation>Whether or not a watch-only address is involved in this transaction.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Destination address of transaction.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Amount removed from or added to balance.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+69"/>
<location line="+17"/>
<source>All</source>
<translation>All</translation>
</message>
<message>
<location line="-16"/>
<source>Today</source>
<translation>Today</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>This week</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>This month</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Last month</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>This year</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Range...</translation>
</message>
<message>
<location line="+12"/>
<source>Most Common</source>
<translation>Most Common</translation>
</message>
<message>
<location line="+1"/>
<source>Received with</source>
<translation>Received with</translation>
</message>
<message>
<location line="+1"/>
<source>Sent to</source>
<translation>Sent to</translation>
</message>
<message>
<location line="+11"/>
<source>To yourself</source>
<translation>To yourself</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Mined</translation>
</message>
<message>
<location line="+1"/>
<source>Minted</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Masternode Reward</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Zerocoin Mint</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Zerocoin Spend</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Zerocoin Spend to Self</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Other</translation>
</message>
<message>
<location line="+6"/>
<source>Enter address or label to search</source>
<translation>Enter address or label to search</translation>
</message>
<message>
<location line="+4"/>
<source>Min amount</source>
<translation>Min amount</translation>
</message>
<message>
<location line="+35"/>
<source>Copy address</source>
<translation>Copy address</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Copy label</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Copy amount</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation>Copy transaction ID</translation>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Edit label</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>Show transaction details</translation>
</message>
<message>
<location line="+1"/>
<source>Hide orphan stakes</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+228"/>
<source>Export Transaction History</source>
<translation>Export Transaction History</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Comma separated file (*.csv)</translation>
</message>
<message>
<location line="+11"/>
<source>Confirmed</source>
<translation>Confirmed</translation>
</message>
<message>
<location line="+2"/>
<source>Watch-only</source>
<translation>Watch-only</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Date</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Type</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Label</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Address</translation>
</message>
<message>
<location line="+2"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+10"/>
<source>Exporting Failed</source>
<translation>Exporting Failed</translation>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the transaction history to %1.</source>
<translation>There was an error trying to save the transaction history to %1.</translation>
</message>
<message>
<location line="-4"/>
<source>Exporting Successful</source>
<translation>Exporting Successful</translation>
</message>
<message>
<location line="-310"/>
<source>Received GGH from zGGH</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Zerocoin Spend, Change in zGGH</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+307"/>
<source>The transaction history was successfully saved to %1.</source>
<translation>The transaction history was successfully saved to %1.</translation>
</message>
<message>
<location line="+121"/>
<source>Range:</source>
<translation>Range:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>to</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
<message>
<location filename="../bitcoingui.cpp" line="+120"/>
<source>Unit to show amounts in. Click to select another unit.</source>
<translation>Unit to show amounts in. Click to select another unit.</translation>
</message>
</context>
<context>
<name>WalletFrame</name>
<message>
<location filename="../walletframe.cpp" line="+26"/>
<source>No wallet has been loaded.</source>
<translation>No wallet has been loaded.</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+334"/>
<location line="+9"/>
<location line="+9"/>
<source>Send Coins</source>
<translation>Send Coins</translation>
</message>
<message>
<location line="-18"/>
<location line="+9"/>
<source>SwiftX doesn't support sending values that high yet. Transactions are currently limited to %1 GGH.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+64"/>
<source>HISTORY</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+32"/>
<source>&Export</source>
<translation>&Export</translation>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation>Export the data in the current tab to a file</translation>
</message>
<message>
<location line="+9"/>
<source>Selected amount:</source>
<translation>Selected amount:</translation>
</message>
<message>
<location line="+265"/>
<source>Backup Wallet</source>
<translation>Backup Wallet</translation>
</message>
<message>
<location line="+1"/>
<source>Wallet Data (*.dat)</source>
<translation>Wallet Data (*.dat)</translation>
</message>
</context>
<context>
<name>ZPivControlDialog</name>
<message>
<location filename="../forms/zgghcontroldialog.ui" line="+20"/>
<source>Select zGGH to Spend</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+28"/>
<source>Quantity</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+7"/>
<location line="+14"/>
<source>0</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-7"/>
<source>zGGH</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+39"/>
<source>Select/Deselect All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+79"/>
<source>Spendable?</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>ggcash-core</name>
<message>
<location filename="../ggcashstrings.cpp" line="+15"/>
<source>(1 = keep tx meta data e.g. account owner and payment request information, 2 = drop tx meta data)</source>
<translation type="unfinished">(1 = keep tx meta data e.g. account owner and payment request information, 2 = drop tx meta data)</translation>
</message>
<message>
<location line="+3"/>
<source>Allow JSON-RPC connections from specified source. Valid for <ip> are a single IP (e.g. 1.2.3.4), a network/netmask (e.g. 1.2.3.4/255.255.255.0) or a network/CIDR (e.g. 1.2.3.4/24). This option can be specified multiple times</source>
<translation type="unfinished">Allow JSON-RPC connections from specified source. Valid for <ip> are a single IP (e.g. 1.2.3.4), a network/netmask (e.g. 1.2.3.4/255.255.255.0) or a network/CIDR (e.g. 1.2.3.4/24). This option can be specified multiple times</translation>
</message>
<message>
<location line="+4"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation type="unfinished">Bind to given address and always listen on it. Use [host]:port notation for IPv6</translation>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and whitelist peers connecting to it. Use [host]:port notation for IPv6</source>
<translation type="unfinished">Bind to given address and whitelist peers connecting to it. Use [host]:port notation for IPv6</translation>
</message>
<message>
<location line="+3"/>
<source>Bind to given address to listen for JSON-RPC connections. Use [host]:port notation for IPv6. This option can be specified multiple times (default: bind to all interfaces)</source>
<translation type="unfinished">Bind to given address to listen for JSON-RPC connections. Use [host]:port notation for IPv6. This option can be specified multiple times (default: bind to all interfaces)</translation>
</message>
<message>
<location line="+4"/>
<source>Calculated accumulator checkpoint is not what is recorded by block index</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Cannot obtain a lock on data directory %s. GGCash Core is probably already running.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Change automatic finalized budget voting behavior. mode=auto: Vote for only exact finalized budget match to my generated budget. (string, default: auto)</source>
<translation type="unfinished">Change automatic finalized budget voting behavior. mode=auto: Vote for only exact finalized budget match to my generated budget. (string, default: auto)</translation>
</message>
<message>
<location line="+3"/>
<source>Continuously rate-limit free transactions to <n>*1000 bytes per minute (default:%u)</source>
<translation type="unfinished">Continuously rate-limit free transactions to <n>*1000 bytes per minute (default:%u)</translation>
</message>
<message>
<location line="+3"/>
<source>Create new files with system default permissions, instead of umask 077 (only effective with disabled wallet functionality)</source>
<translation type="unfinished">Create new files with system default permissions, instead of umask 077 (only effective with disabled wallet functionality)</translation>
</message>
<message>
<location line="+3"/>
<source>Delete all wallet transactions and only recover those parts of the blockchain through -rescan on startup</source>
<translation type="unfinished">Delete all wallet transactions and only recover those parts of the blockchain through -rescan on startup</translation>
</message>
<message>
<location line="+3"/>
<source>Delete all zerocoin spends and mints that have been recorded to the blockchain database and reindex them (0-1, default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>Distributed under the MIT software license, see the accompanying file COPYING or <http://www.opensource.org/licenses/mit-license.php>.</source>
<translation type="unfinished">Distributed under the MIT software license, see the accompanying file COPYING or <http://www.opensource.org/licenses/mit-license.php>.</translation>
</message>
<message>
<location line="+5"/>
<source>Enable automatic Zerocoin minting from specific addresses (0-1, default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Enable automatic wallet backups triggered after each zGGH minting (0-1, default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Enable or disable staking functionality for GGH inputs (0-1, default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Enable or disable staking functionality for zGGH inputs (0-1, default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Enable spork administration functionality with the appropriate private key.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly.</source>
<translation type="unfinished">Enter regression test mode, which uses a special chain in which blocks can be solved instantly.</translation>
</message>
<message>
<location line="+3"/>
<source>Error: Listening for incoming connections failed (listen returned error %s)</source>
<translation type="unfinished">Error: Listening for incoming connections failed (listen returned error %s)</translation>
</message>
<message>
<location line="+2"/>
<source>Error: The transaction is larger than the maximum allowed transaction size!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+11"/>
<source>Error: Unsupported argument -socks found. Setting SOCKS version isn't possible anymore, only SOCKS5 proxies are supported.</source>
<translation type="unfinished">Error: Unsupported argument -socks found. Setting SOCKS version isn't possible anymore, only SOCKS5 proxies are supported.</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by message)</source>
<translation type="unfinished">Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by message)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished">Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished">Execute command when the best block changes (%s in cmd is replaced by block hash)</translation>
</message>
<message>
<location line="+8"/>
<source>Fees (in GGH/Kb) smaller than this are considered zero fee for relaying (default: %s)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Fees (in GGH/Kb) smaller than this are considered zero fee for transaction creation (default: %s)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Flush database activity from memory pool to disk log every <n> megabytes (default: %u)</source>
<translation type="unfinished">Flush database activity from memory pool to disk log every <n> megabytes (default: %u)</translation>
</message>
<message>
<location line="+3"/>
<source>If paytxfee is not set, include enough fee so transactions begin confirmation on average within n blocks (default: %u)</source>
<translation type="unfinished">If paytxfee is not set, include enough fee so transactions begin confirmation on average within n blocks (default: %u)</translation>
</message>
<message>
<location line="+6"/>
<source>In this mode -genproclimit controls how many blocks are generated immediately.</source>
<translation type="unfinished">In this mode -genproclimit controls how many blocks are generated immediately.</translation>
</message>
<message>
<location line="+3"/>
<source>Insufficient or insufficient confirmed funds, you might need to wait a few minutes and try again.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Invalid amount for -maxtxfee=<amount>: '%s' (must be at least the minrelay fee of %s to prevent stuck transactions)</source>
<translation type="unfinished">Invalid amount for -maxtxfee=<amount>: '%s' (must be at least the minrelay fee of %s to prevent stuck transactions)</translation>
</message>
<message>
<location line="+3"/>
<source>Keep the specified amount available for spending at all times (default: 0)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Log transaction priority and fee per kB when mining blocks (default: %u)</source>
<translation type="unfinished">Log transaction priority and fee per kB when mining blocks (default: %u)</translation>
</message>
<message>
<location line="+2"/>
<source>Maintain a full transaction index, used by the getrawtransaction rpc call (default: %u)</source>
<translation type="unfinished">Maintain a full transaction index, used by the getrawtransaction rpc call (default: %u)</translation>
</message>
<message>
<location line="+3"/>
<source>Maximum average size of an index occurrence in the block spam filter (default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Maximum size of data in data carrier transactions we relay and mine (default: %u)</source>
<translation type="unfinished">Maximum size of data in data carrier transactions we relay and mine (default: %u)</translation>
</message>
<message>
<location line="+3"/>
<source>Maximum size of the list of indexes in the block spam filter (default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Maximum total fees to use in a single wallet transaction, setting too low may abort large transactions (default: %s)</source>
<translation type="unfinished">Maximum total fees to use in a single wallet transaction, setting too low may abort large transactions (default: %s)</translation>
</message>
<message>
<location line="+3"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: %u)</source>
<translation type="unfinished">Number of seconds to keep misbehaving peers from reconnecting (default: %u)</translation>
</message>
<message>
<location line="+2"/>
<source>Obfuscation uses exact denominated amounts to send funds, you might simply need to anonymize some more coins.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Output debugging information (default: %u, supplying <category> is optional)</source>
<translation type="unfinished">Output debugging information (default: %u, supplying <category> is optional)</translation>
</message>
<message>
<location line="+5"/>
<source>Query for peer addresses via DNS lookup, if low on addresses (default: 1 unless -connect)</source>
<translation type="unfinished">Query for peer addresses via DNS lookup, if low on addresses (default: 1 unless -connect)</translation>
</message>
<message>
<location line="+3"/>
<source>Randomize credentials for every proxy connection. This enables Tor stream isolation (default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Require high priority for relaying free or low-fee transactions (default:%u)</source>
<translation type="unfinished">Require high priority for relaying free or low-fee transactions (default:%u)</translation>
</message>
<message>
<location line="+2"/>
<source>Send trace/debug info to console instead of debug.log file (default: %u)</source>
<translation type="unfinished">Send trace/debug info to console instead of debug.log file (default: %u)</translation>
</message>
<message>
<location line="+2"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: %d)</source>
<translation type="unfinished">Set maximum size of high-priority/low-fee transactions in bytes (default: %d)</translation>
</message>
<message>
<location line="+2"/>
<source>Set the number of included blocks to precompute per cycle. (minimum: %d) (maximum: %d) (default: %d)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Set the number of script verification threads (%u to %d, 0 = auto, <0 = leave that many cores free, default: %d)</source>
<translation type="unfinished">Set the number of script verification threads (%u to %d, 0 = auto, <0 = leave that many cores free, default: %d)</translation>
</message>
<message>
<location line="+3"/>
<source>Set the number of threads for coin generation if enabled (-1 = all cores, default: %d)</source>
<translation type="unfinished">Set the number of threads for coin generation if enabled (-1 = all cores, default: %d)</translation>
</message>
<message>
<location line="+3"/>
<source>Show N confirmations for a successfully locked transaction (0-9999, default: %u)</source>
<translation type="unfinished">Show N confirmations for a successfully locked transaction (0-9999, default: %u)</translation>
</message>
<message>
<location line="+12"/>
<source>Support filtering of blocks and transaction with bloom filters (default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>The block database contains a block which appears to be from the future. This may be due to your computer's date and time being set incorrectly. Only rebuild the block database if you are sure that your computer's date and time are correct</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+8"/>
<source>This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit <https://www.openssl.org/> and cryptographic software written by Eric Young and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished">This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit <https://www.openssl.org/> and cryptographic software written by Eric Young and UPnP software written by Thomas Bernard.</translation>
</message>
<message>
<location line="+4"/>
<source>Total length of network version string (%i) exceeds maximum length (%i). Reduce the number or size of uacomments.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Unable to bind to %s on this computer. GGCash Core is probably already running.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Unable to locate enough Obfuscation denominated funds for this transaction.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Unable to locate enough Obfuscation non-denominated funds for this transaction that are not equal 10000 GGH.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Unable to locate enough funds for this transaction that are not equal 10000 GGH.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Use separate SOCKS5 proxy to reach peers via Tor hidden services (default: %s)</source>
<translation type="unfinished">Use separate SOCKS5 proxy to reach peers via Tor hidden services (default: %s)</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: -maxtxfee is set very high! Fees this large could be paid on a single transaction.</source>
<translation type="unfinished">Warning: -maxtxfee is set very high! Fees this large could be paid on a single transaction.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished">Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong GGCash Core will not work properly.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</source>
<translation type="unfinished">Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation type="unfinished">Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished">Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished">Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</translation>
</message>
<message>
<location line="+4"/>
<source>Whitelist peers connecting from the given netmask or IP address. Can be specified multiple times.</source>
<translation type="unfinished">Whitelist peers connecting from the given netmask or IP address. Can be specified multiple times.</translation>
</message>
<message>
<location line="+3"/>
<source>Whitelisted peers cannot be DoS banned and their transactions are always relayed, even if they are already in the mempool, useful e.g. for a gateway</source>
<translation type="unfinished">Whitelisted peers cannot be DoS banned and their transactions are always relayed, even if they are already in the mempool, useful e.g. for a gateway</translation>
</message>
<message>
<location line="+3"/>
<source>You must specify a masternodeprivkey in the configuration. Please see documentation for help.</source>
<translation type="unfinished">You must specify a masternodeprivkey in the configuration. Please see documentation for help.</translation>
</message>
<message>
<location line="+3"/>
<source>(24484 could be used only on mainnet)</source>
<translation type="unfinished">(24484 could be used only on mainnet)</translation>
</message>
<message>
<location line="+1"/>
<source>(default: %s)</source>
<translation type="unfinished">(default: %s)</translation>
</message>
<message>
<location line="+1"/>
<source>(default: 1)</source>
<translation type="unfinished">(default: 1)</translation>
</message>
<message>
<location line="+1"/>
<source>(must be 24484 for mainnet)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished">Accept command line and JSON-RPC commands</translation>
</message>
<message>
<location line="+1"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished">Accept connections from outside (default: 1 if no -proxy or -connect)</translation>
</message>
<message>
<location line="+1"/>
<source>Accept public REST requests (default: %u)</source>
<translation type="unfinished">Accept public REST requests (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished">Add a node to connect to and attempt to keep the connection open</translation>
</message>
<message>
<location line="+1"/>
<source>Adding Wrapped Serials supply...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished">Allow DNS lookups for -addnode, -seednode and -connect</translation>
</message>
<message>
<location line="+1"/>
<source>Always query for peer addresses via DNS lookup (default: %u)</source>
<translation type="unfinished">Always query for peer addresses via DNS lookup (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Append comment to the user agent string</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished">Attempt to recover private keys from a corrupt wallet.dat</translation>
</message>
<message>
<location line="+1"/>
<source>Automatically create Tor hidden service (default: %d)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Block creation options:</source>
<translation type="unfinished">Block creation options:</translation>
</message>
<message>
<location line="+1"/>
<source>Calculating missing accumulators...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished">Cannot downgrade wallet</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished">Cannot resolve -bind address: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished">Cannot resolve -externalip address: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -whitebind address: '%s'</source>
<translation type="unfinished">Cannot resolve -whitebind address: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation type="unfinished">Cannot write default address</translation>
</message>
<message>
<location line="+2"/>
<source>CoinSpend: failed check</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished">Connect only to the specified node(s)</translation>
</message>
<message>
<location line="+1"/>
<source>Connect through SOCKS5 proxy</source>
<translation type="unfinished">Connect through SOCKS5 proxy</translation>
</message>
<message>
<location line="+1"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished">Connect to a node to retrieve peer addresses, and disconnect</translation>
</message>
<message>
<location line="+1"/>
<source>Connection options:</source>
<translation type="unfinished">Connection options:</translation>
</message>
<message>
<location line="+1"/>
<source>Copyright (C) 2009-%i The Bitcoin Core Developers</source>
<translation type="unfinished">Copyright (C) 2009-%i The Bitcoin Core Developers</translation>
</message>
<message>
<location line="+1"/>
<source>Copyright (C) 2014-%i The Dash Core Developers</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Copyright (C) 2015-%i The PIVX Core Developers</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Corrupted block database detected</source>
<translation type="unfinished">Corrupted block database detected</translation>
</message>
<message>
<location line="+1"/>
<source>Could not parse masternode.conf</source>
<translation type="unfinished">Could not parse masternode.conf</translation>
</message>
<message>
<location line="+1"/>
<source>Couldn't generate the accumulator witness</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Debugging/Testing options:</source>
<translation type="unfinished">Debugging/Testing options:</translation>
</message>
<message>
<location line="+1"/>
<source>Delete blockchain folders and resync from scratch</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Disable OS notifications for incoming transactions (default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Disable safemode, override a real safe mode event (default: %u)</source>
<translation type="unfinished">Disable safemode, override a real safe mode event (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished">Discover own IP address (default: 1 when listening and no -externalip)</translation>
</message>
<message>
<location line="+3"/>
<source>Do not load the wallet and disable wallet RPC calls</source>
<translation type="unfinished">Do not load the wallet and disable wallet RPC calls</translation>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation type="unfinished">Do you want to rebuild the block database now?</translation>
</message>
<message>
<location line="+1"/>
<source>Done loading</source>
<translation type="unfinished">Done loading</translation>
</message>
<message>
<location line="+1"/>
<source>Enable automatic Zerocoin minting (0-1, default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Enable precomputation of zGGH spends and stakes (0-1, default %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Enable publish hash transaction (locked via SwiftX) in <address></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Enable publish raw transaction (locked via SwiftX) in <address></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Enable the client to act as a masternode (0-1, default: %u)</source>
<translation type="unfinished">Enable the client to act as a masternode (0-1, default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Error initializing block database</source>
<translation type="unfinished">Error initializing block database</translation>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation type="unfinished">Error initializing wallet database environment %s!</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation type="unfinished">Error loading block database</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished">Error loading wallet.dat</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished">Error loading wallet.dat: Wallet corrupted</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of GGCash Core</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Error opening block database</source>
<translation type="unfinished">Error opening block database</translation>
</message>
<message>
<location line="+1"/>
<source>Error reading from database, shutting down.</source>
<translation type="unfinished">Error reading from database, shutting down.</translation>
</message>
<message>
<location line="+1"/>
<source>Error recovering public key.</source>
<translation type="unfinished">Error recovering public key.</translation>
</message>
<message>
<location line="+1"/>
<source>Error writing zerocoinDB to disk</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Error</source>
<translation type="unfinished">Error</translation>
</message>
<message>
<location line="+1"/>
<source>Error: A fatal internal error occured, see debug.log for details</source>
        <translation type="unfinished">Error: A fatal internal error occurred, see debug.log for details</translation>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation type="unfinished">Error: Disk space is low!</translation>
</message>
<message>
<location line="+2"/>
<source>Error: Unsupported argument -tor found, use -onion.</source>
<translation type="unfinished">Error: Unsupported argument -tor found, use -onion.</translation>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation type="unfinished">Error: Wallet locked, unable to create transaction!</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to calculate accumulator checkpoint</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished">Failed to listen on any port. Use -listen=0 if you want this.</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to parse host:port string</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Failed to read block</source>
<translation type="unfinished">Failed to read block</translation>
</message>
<message>
<location line="+4"/>
<source>Fee (in GGH/kB) to add to transactions you send (default: %s)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Force safe mode (default: %u)</source>
<translation type="unfinished">Force safe mode (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: %u)</source>
<translation type="unfinished">Generate coins (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>How many blocks to check at startup (default: %u, 0 = all)</source>
<translation type="unfinished">How many blocks to check at startup (default: %u, 0 = all)</translation>
</message>
<message>
<location line="+1"/>
<source>If <category> is not supplied, output all debugging information.</source>
<translation type="unfinished">If <category> is not supplied, output all debugging information.</translation>
</message>
<message>
<location line="+1"/>
<source>Importing...</source>
<translation type="unfinished">Importing...</translation>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation type="unfinished">Imports blocks from external blk000??.dat file</translation>
</message>
<message>
<location line="+1"/>
<source>Include IP addresses in debug output (default: %u)</source>
<translation type="unfinished">Include IP addresses in debug output (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Incorrect or no genesis block found. Wrong datadir for network?</source>
<translation type="unfinished">Incorrect or no genesis block found. Wrong datadir for network?</translation>
</message>
<message>
<location line="+1"/>
<source>Information</source>
<translation type="unfinished">Information</translation>
</message>
<message>
<location line="+1"/>
<source>Initialization sanity check failed. GGCash Core is shutting down.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds.</source>
<translation type="unfinished">Insufficient funds.</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid -onion address or hostname: '%s'</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -maxtxfee=<amount>: '%s'</source>
<translation type="unfinished">Invalid amount for -maxtxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished">Invalid amount for -minrelaytxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished">Invalid amount for -mintxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -paytxfee=<amount>: '%s' (must be at least %s)</source>
<translation type="unfinished">Invalid amount for -paytxfee=<amount>: '%s' (must be at least %s)</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished">Invalid amount for -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Invalid masternodeprivkey. Please see documenation.</source>
        <translation type="unfinished">Invalid masternodeprivkey. Please see documentation.</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid netmask specified in -whitelist: '%s'</source>
<translation type="unfinished">Invalid netmask specified in -whitelist: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid port detected in masternode.conf</source>
<translation type="unfinished">Invalid port detected in masternode.conf</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid private key.</source>
<translation type="unfinished">Invalid private key.</translation>
</message>
<message>
<location line="+32"/>
<source>Percentage of automatically minted Zerocoin (1-100, default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+9"/>
<source>Recalculating GGH supply...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Recalculating minted ZGGH...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Recalculating spent ZGGH...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Reindex the GGH and zGGH money supply statistics</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Reindexing zerocoin database...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Reindexing zerocoin failed</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+9"/>
<source>Selected coins value is less than payment target</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+28"/>
<source>Support the zerocoin light node protocol (default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>SwiftX options:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-247"/>
<source>This is a pre-release test build - use at your own risk - do not use for staking or merchant applications!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-187"/>
<source> mints deleted
</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source> mints updated, </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source> unconfirmed transactions removed
</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+38"/>
<source>Disable all GGCash specific functionality (Masternodes, Zerocoin, SwiftX, Budgeting) (0-1, default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>Enable SwiftX, show confirmations for locked transactions (bool, default: %s)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+20"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Error: Unsupported argument -checklevel found. Checklevel must be level 4.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>Execute command when the best block changes and its size is over (%s in cmd is replaced by block hash, %d with the block size)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Failed to find coin set amongst held coins with less than maxNumber of Spends</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>In rare cases, a spend with 7 coins exceeds our maximum allowable transaction size, please retry spend using 6 or less coins</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+37"/>
<source>Preferred Denomination for automatically minted Zerocoin (1/5/10/50/100/500/1000/5000), 0 for no preference. default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+27"/>
<source>Specify custom backup path to add a copy of any automatic zGGH backup. If set as dir, every backup generates a timestamped file. If set as file, will rewrite to that file every backup. If backuppath is set as well, 4 backups will happen</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Specify custom backup path to add a copy of any wallet backup. If set as dir, every backup generates a timestamped file. If set as file, will rewrite to that file every backup.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>SwiftX requires inputs with at least 6 confirmations, you might need to wait a few minutes and try again.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+66"/>
<source><category> can be:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+9"/>
<source>Attempt to force blockchain corruption recovery</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+5"/>
<source>Cannot create public spend input</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>CoinSpend: Accumulator witness does not verify</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+17"/>
<source>Display the stake modifier calculations in the debug.log file.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Display verbose coin stake messages in the debug.log file.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+6"/>
<source>Enable publish hash block in <address></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Enable publish hash transaction in <address></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Enable publish raw block in <address></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Enable publish raw transaction in <address></source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Enable staking functionality (0-1, default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+14"/>
<source>Error: A fatal internal error occurred, see debug.log for details</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Error: No valid utxo!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+4"/>
<source>Failed to create mint</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Failed to find Zerocoins in wallet.dat</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Failed to parse public spend</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Failed to select a zerocoin</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Failed to wipe zerocoinDB</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write coin serial number into wallet</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+26"/>
<source>Keep at most <n> unconnectable transactions in memory (default: %u)</source>
<translation type="unfinished">Keep at most <n> unconnectable transactions in memory (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Limit size of signature cache to <n> entries (default: %u)</source>
<translation type="unfinished">Limit size of signature cache to <n> entries (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Line: %d</source>
<translation type="unfinished">Line: %d</translation>
</message>
<message>
<location line="+1"/>
<source>Listen for JSON-RPC connections on <port> (default: %u or testnet: %u)</source>
<translation type="unfinished">Listen for JSON-RPC connections on <port> (default: %u or testnet: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Listen for connections on <port> (default: %u or testnet: %u)</source>
<translation type="unfinished">Listen for connections on <port> (default: %u or testnet: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Loading addresses...</source>
<translation type="unfinished">Loading addresses...</translation>
</message>
<message>
<location line="+1"/>
<source>Loading block index...</source>
<translation type="unfinished">Loading block index...</translation>
</message>
<message>
<location line="+1"/>
<source>Loading budget cache...</source>
<translation type="unfinished">Loading budget cache...</translation>
</message>
<message>
<location line="+1"/>
<source>Loading masternode cache...</source>
<translation type="unfinished">Loading masternode cache...</translation>
</message>
<message>
<location line="+1"/>
<source>Loading masternode payment cache...</source>
<translation type="unfinished">Loading masternode payment cache...</translation>
</message>
<message>
<location line="+1"/>
<source>Loading sporks...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Loading wallet... (%3.2f %%)</source>
<translation type="unfinished">Loading wallet... (%3.2f %%)</translation>
</message>
<message>
<location line="+1"/>
<source>Loading wallet...</source>
<translation type="unfinished">Loading wallet...</translation>
</message>
<message>
<location line="+1"/>
<source>Location of the auth cookie (default: data dir)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Lock masternodes from masternode configuration file (default: %u)</source>
<translation type="unfinished">Lock masternodes from masternode configuration file (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Lookup(): Invalid -proxy address or hostname: '%s'</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: %u)</source>
<translation type="unfinished">Maintain at most <n> connections to peers (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Masternode options:</source>
<translation type="unfinished">Masternode options:</translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: %u)</source>
<translation type="unfinished">Maximum per-connection receive buffer, <n>*1000 bytes (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: %u)</source>
<translation type="unfinished">Maximum per-connection send buffer, <n>*1000 bytes (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Mint did not make it into blockchain</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Need address because change is not exact</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Need to specify a port with -whitebind: '%s'</source>
<translation type="unfinished">Need to specify a port with -whitebind: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Node relay options:</source>
<translation type="unfinished">Node relay options:</translation>
</message>
<message>
<location line="+1"/>
<source>Not enough file descriptors available.</source>
<translation type="unfinished">Not enough file descriptors available.</translation>
</message>
<message>
<location line="+1"/>
<source>Number of automatic wallet backups (default: 10)</source>
<translation type="unfinished">Number of automatic wallet backups (default: 10)</translation>
</message>
<message>
<location line="+1"/>
<source>Number of custom location backups to retain (default: %d)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Only accept block chain matching built-in checkpoints (default: %u)</source>
<translation type="unfinished">Only accept block chain matching built-in checkpoints (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (ipv4, ipv6 or onion)</source>
<translation type="unfinished">Only connect to nodes in network <net> (ipv4, ipv6 or onion)</translation>
</message>
<message>
<location line="+1"/>
<source>Options:</source>
<translation type="unfinished">Options:</translation>
</message>
<message>
<location line="+1"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished">Password for JSON-RPC connections</translation>
</message>
<message>
<location line="+81"/>
<source>Unable to find transaction containing mint %s</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Unable to find transaction containing mint, txHash: %s</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+8"/>
<source>Use block spam filter (default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+20"/>
<source>could not get lock on cs_spendcache</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>isValid(): Invalid -proxy address or hostname: '%s'</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="-109"/>
<source>Preparing for resync...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp (default: %u)</source>
<translation type="unfinished">Prepend debug output with timestamp (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Print version and exit</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Pubcoin not found in mint tx</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>RPC server options:</source>
<translation type="unfinished">RPC server options:</translation>
</message>
<message>
<location line="+1"/>
<source>Randomly drop 1 of every <n> network messages</source>
<translation type="unfinished">Randomly drop 1 of every <n> network messages</translation>
</message>
<message>
<location line="+1"/>
<source>Randomly fuzz 1 of every <n> network messages</source>
<translation type="unfinished">Randomly fuzz 1 of every <n> network messages</translation>
</message>
<message>
<location line="+1"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation type="unfinished">Rebuild block chain index from current blk000??.dat files</translation>
</message>
<message>
<location line="+4"/>
<source>Receive and display P2P network alerts (default: %u)</source>
<translation type="unfinished">Receive and display P2P network alerts (default: %u)</translation>
</message>
<message>
<location line="+2"/>
<source>Reindex the accumulator database</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>Relay and mine data carrier transactions (default: %u)</source>
<translation type="unfinished">Relay and mine data carrier transactions (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Relay non-P2SH multisig (default: %u)</source>
<translation type="unfinished">Relay non-P2SH multisig (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished">Rescan the block chain for missing wallet transactions</translation>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation type="unfinished">Rescanning...</translation>
</message>
<message>
<location line="+1"/>
<source>ResetMintZerocoin finished: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>ResetSpentZerocoin finished: </source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Run a thread to flush wallet periodically (default: %u)</source>
<translation type="unfinished">Run a thread to flush wallet periodically (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished">Run in the background as a daemon and accept commands</translation>
</message>
<message>
<location line="+2"/>
<source>Send transactions as zero-fee transactions if possible (default: %u)</source>
<translation type="unfinished">Send transactions as zero-fee transactions if possible (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Session timed out.</source>
<translation type="unfinished">Session timed out.</translation>
</message>
<message>
<location line="+1"/>
<source>Set database cache size in megabytes (%d to %d, default: %d)</source>
<translation type="unfinished">Set database cache size in megabytes (%d to %d, default: %d)</translation>
</message>
<message>
<location line="+1"/>
<source>Set external address:port to get to this masternode (example: %s)</source>
<translation type="unfinished">Set external address:port to get to this masternode (example: %s)</translation>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: %u)</source>
<translation type="unfinished">Set key pool size to <n> (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Set maximum block size in bytes (default: %d)</source>
<translation type="unfinished">Set maximum block size in bytes (default: %d)</translation>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: %u)</source>
<translation type="unfinished">Set minimum block size in bytes (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Set the Maximum reorg depth (default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Set the masternode private key</source>
<translation type="unfinished">Set the masternode private key</translation>
</message>
<message>
<location line="+1"/>
<source>Set the number of threads to service RPC calls (default: %d)</source>
<translation type="unfinished">Set the number of threads to service RPC calls (default: %d)</translation>
</message>
<message>
<location line="+1"/>
<source>Sets the DB_PRIVATE flag in the wallet db environment (default: %u)</source>
<translation type="unfinished">Sets the DB_PRIVATE flag in the wallet db environment (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Show all debugging options (usage: --help -help-debug)</source>
<translation type="unfinished">Show all debugging options (usage: --help -help-debug)</translation>
</message>
<message>
<location line="+1"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished">Shrink debug.log file on client startup (default: 1 when no -debug)</translation>
</message>
<message>
<location line="+1"/>
<source>Signing failed.</source>
<translation type="unfinished">Signing failed.</translation>
</message>
<message>
<location line="+1"/>
<source>Signing timed out.</source>
<translation type="unfinished">Signing timed out.</translation>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished">Signing transaction failed</translation>
</message>
<message>
<location line="+1"/>
<source>Specify configuration file (default: %s)</source>
<translation type="unfinished">Specify configuration file (default: %s)</translation>
</message>
<message>
<location line="+1"/>
<source>Specify connection timeout in milliseconds (minimum: 1, default: %d)</source>
<translation type="unfinished">Specify connection timeout in milliseconds (minimum: 1, default: %d)</translation>
</message>
<message>
<location line="+1"/>
<source>Specify data directory</source>
<translation type="unfinished">Specify data directory</translation>
</message>
<message>
<location line="+1"/>
<source>Specify masternode configuration file (default: %s)</source>
<translation type="unfinished">Specify masternode configuration file (default: %s)</translation>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: %s)</source>
<translation type="unfinished">Specify pid file (default: %s)</translation>
</message>
<message>
<location line="+1"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished">Specify wallet file (within data directory)</translation>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation type="unfinished">Specify your own public address</translation>
</message>
<message>
<location line="+1"/>
<source>Spend Valid</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Spend unconfirmed change when sending transactions (default: %u)</source>
<translation type="unfinished">Spend unconfirmed change when sending transactions (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Staking options:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Stop running after importing blocks from disk (default: %u)</source>
<translation type="unfinished">Stop running after importing blocks from disk (default: %u)</translation>
</message>
<message>
<location line="+3"/>
<source>Synchronization failed</source>
<translation type="unfinished">Synchronization failed</translation>
</message>
<message>
<location line="+1"/>
<source>Synchronization finished</source>
<translation type="unfinished">Synchronization finished</translation>
</message>
<message>
<location line="+1"/>
<source>Synchronization pending...</source>
<translation type="unfinished">Synchronization pending...</translation>
</message>
<message>
<location line="+1"/>
<source>Synchronizing budgets...</source>
<translation type="unfinished">Synchronizing budgets...</translation>
</message>
<message>
<location line="+1"/>
<source>Synchronizing masternode winners...</source>
<translation type="unfinished">Synchronizing masternode winners...</translation>
</message>
<message>
<location line="+1"/>
<source>Synchronizing masternodes...</source>
<translation type="unfinished">Synchronizing masternodes...</translation>
</message>
<message>
<location line="+1"/>
<source>Synchronizing sporks...</source>
<translation type="unfinished">Synchronizing sporks...</translation>
</message>
<message>
<location line="+1"/>
<source>Syncing zGGH wallet...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>The coin spend has been used</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>The transaction did not verify</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>This help message</source>
<translation type="unfinished">This help message</translation>
</message>
<message>
<location line="+1"/>
<source>This is experimental software.</source>
<translation type="unfinished">This is experimental software.</translation>
</message>
<message>
<location line="+1"/>
<source>This is intended for regression testing tools and app development.</source>
<translation type="unfinished">This is intended for regression testing tools and app development.</translation>
</message>
<message>
<location line="+1"/>
<source>Threshold for disconnecting misbehaving peers (default: %u)</source>
<translation type="unfinished">Threshold for disconnecting misbehaving peers (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Too many spends needed</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Tor control port password (default: empty)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Tor control port to use if onion listening enabled (default: %s)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Transaction Created</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Transaction Mint Started</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Transaction amount too small</source>
<translation type="unfinished">Transaction amount too small</translation>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished">Transaction amounts must be positive</translation>
</message>
<message>
<location line="+1"/>
<source>Transaction too large for fee policy</source>
<translation type="unfinished">Transaction too large for fee policy</translation>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished">Transaction too large</translation>
</message>
<message>
<location line="+1"/>
<source>Trying to spend an already spent serial #, try again.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Unable to bind to %s on this computer (bind returned error %s)</source>
<translation type="unfinished">Unable to bind to %s on this computer (bind returned error %s)</translation>
</message>
<message>
<location line="+3"/>
<source>Unable to sign spork message, wrong key?</source>
<translation type="unfinished">Unable to sign spork message, wrong key?</translation>
</message>
<message>
<location line="+1"/>
<source>Unable to start HTTP server. See debug log for details.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished">Unknown network specified in -onlynet: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished">Upgrade wallet to latest format</translation>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: %u)</source>
<translation type="unfinished">Use UPnP to map the listening port (default: %u)</translation>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished">Use UPnP to map the listening port (default: 1 when listening)</translation>
</message>
<message>
<location line="+1"/>
<source>Use a custom max chain reorganization depth (default: %u)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+2"/>
<source>Use the test network</source>
<translation type="unfinished">Use the test network</translation>
</message>
<message>
<location line="+1"/>
<source>User Agent comment (%s) contains unsafe characters.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished">Username for JSON-RPC connections</translation>
</message>
<message>
<location line="+1"/>
<source>Value is below the smallest available denomination (= 1) of zGGH</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Verifying blocks...</source>
<translation type="unfinished">Verifying blocks...</translation>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation type="unfinished">Verifying wallet...</translation>
</message>
<message>
<location line="+1"/>
<source>Wallet %s resides outside data directory %s</source>
<translation type="unfinished">Wallet %s resides outside data directory %s</translation>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart GGCash Core to complete</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Wallet options:</source>
<translation type="unfinished">Wallet options:</translation>
</message>
<message>
<location line="+1"/>
<source>Wallet window title</source>
<translation type="unfinished">Wallet window title</translation>
</message>
<message>
<location line="+1"/>
<source>Warning</source>
<translation type="unfinished">Warning</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished">Warning: This version is obsolete, upgrade required!</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: Unsupported argument -benchmark ignored, use -debug=bench.</source>
<translation type="unfinished">Warning: Unsupported argument -benchmark ignored, use -debug=bench.</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: Unsupported argument -debugnet ignored, use -debug=net.</source>
<translation type="unfinished">Warning: Unsupported argument -debugnet ignored, use -debug=net.</translation>
</message>
<message>
<location line="+1"/>
<source>You don't have enough Zerocoins in your wallet</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the database using -reindex to change -txindex</source>
<translation type="unfinished">You need to rebuild the database using -reindex to change -txindex</translation>
</message>
<message>
<location line="+1"/>
<source>Zapping all transactions from wallet...</source>
<translation type="unfinished">Zapping all transactions from wallet...</translation>
</message>
<message>
<location line="+1"/>
<source>ZeroMQ notification options:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+1"/>
<source>Zerocoin options:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location line="+3"/>
<source>on startup</source>
<translation type="unfinished">on startup</translation>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished">wallet.dat corrupt, salvage failed</translation>
</message>
</context>
</TS> | <message>
<location line="+16"/>
<source>MB</source>
<translation>MB</translation> |
Subsets and Splits