memory.rs
/**
* Copyright 2021 Rigetti Computing
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
use std::collections::HashSet;
use crate::expression::Expression;
use crate::instruction::{
Arithmetic, ArithmeticOperand, Capture, CircuitDefinition, Delay, Exchange, Gate,
GateDefinition, Instruction, Jump, JumpUnless, JumpWhen, Label, Load,
MeasureCalibrationDefinition, Measurement, MemoryReference, Move, Pulse, RawCapture, SetPhase,
SetScale, ShiftPhase, Store, Vector, WaveformInvocation,
};
#[derive(Clone, Debug, Hash, PartialEq)]
pub struct MemoryRegion {
pub size: Vector,
pub sharing: Option<String>,
}
impl Eq for MemoryRegion {}
#[derive(Clone, Debug)]
pub struct MemoryAccess {
pub regions: HashSet<String>,
pub access_type: MemoryAccessType,
}
#[derive(Clone, Debug, Default)]
pub struct MemoryAccesses {
pub captures: HashSet<String>,
pub reads: HashSet<String>,
pub writes: HashSet<String>,
}
/// Express a mode of memory access.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub enum MemoryAccessType {
/// Read from a memory location
Read,
/// Write to a memory location using classical instructions
Write,
/// Write to a memory location using readout (`CAPTURE` and `RAW-CAPTURE` instructions)
Capture,
}
macro_rules! merge_sets {
($left:expr, $right:expr) => {
$left.union(&$right).cloned().collect::<HashSet<String>>()
};
}
/// Build a HashSet<String> from a Vec<&str> by cloning
macro_rules! set_from_reference_vec {
($vec:expr) => {
$vec.into_iter()
.map(|el| el.clone())
.collect::<HashSet<String>>()
};
}
/// Build a HashSet<String> from an Option<&MemoryReference>
macro_rules! set_from_optional_memory_reference {
($reference:expr) => {
set_from_reference_vec![$reference.map_or(vec![], |reference| vec![reference.name.clone()])]
};
}
/// Build a HashSet<String> from a Vec<&MemoryReference>
macro_rules! set_from_memory_references {
($references:expr) => {
set_from_reference_vec![$references.iter().map(|reference| reference.name.clone())]
};
}
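// Illustrative expansion (not in the original): given `refs: Vec<&MemoryReference>`
// whose elements are named "ro" and "theta", `set_from_memory_references![refs]`
// clones each name into a `HashSet<String>` containing {"ro", "theta"};
// duplicate names collapse into a single entry.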
impl Instruction {
/// Return all memory accesses by the instruction - in expressions, captures, and memory manipulation
pub fn get_memory_accesses(&self) -> MemoryAccesses {
match self {
Instruction::Arithmetic(Arithmetic {
destination,
source,
..
})
| Instruction::Move(Move {
destination,
source,
}) => MemoryAccesses {
writes: set_from_optional_memory_reference![destination.get_memory_reference()],
reads: set_from_optional_memory_reference![source.get_memory_reference()],
..Default::default()
},
Instruction::CalibrationDefinition(definition) => {
let references: Vec<&MemoryReference> = definition
.parameters
.iter()
.flat_map(|expr| expr.get_memory_references())
.collect();
MemoryAccesses {
reads: set_from_memory_references![references],
..Default::default()
}
}
Instruction::Capture(Capture {
memory_reference,
waveform,
..
}) => MemoryAccesses {
captures: set_from_memory_references!(vec![memory_reference]),
reads: set_from_memory_references!(waveform.get_memory_references()),
..Default::default()
},
Instruction::CircuitDefinition(CircuitDefinition { instructions, .. })
| Instruction::MeasureCalibrationDefinition(MeasureCalibrationDefinition {
instructions,
..
}) => instructions.iter().fold(Default::default(), |acc, el| {
let el_accesses = el.get_memory_accesses();
MemoryAccesses {
reads: merge_sets!(acc.reads, el_accesses.reads),
writes: merge_sets!(acc.writes, el_accesses.writes),
captures: merge_sets!(acc.captures, el_accesses.captures),
}
}),
Instruction::Declaration(_) => Default::default(),
Instruction::Delay(Delay { duration, .. }) => MemoryAccesses {
reads: set_from_memory_references!(duration.get_memory_references()),
..Default::default()
},
Instruction::Exchange(Exchange { left, right }) => MemoryAccesses {
writes: merge_sets![
set_from_optional_memory_reference!(left.get_memory_reference()),
set_from_optional_memory_reference!(right.get_memory_reference())
],
..Default::default()
},
Instruction::Fence(_) => Default::default(),
Instruction::FrameDefinition(_) => Default::default(),
Instruction::Gate(Gate { parameters, .. }) => MemoryAccesses {
reads: set_from_memory_references!(parameters
.iter()
.flat_map(|param| param.get_memory_references())
.collect::<Vec<&MemoryReference>>()),
..Default::default()
},
Instruction::GateDefinition(GateDefinition { matrix, .. }) => {
let references = matrix
.iter()
.flat_map(|row| row.iter().flat_map(|cell| cell.get_memory_references()))
.collect::<Vec<&MemoryReference>>();
MemoryAccesses {
reads: set_from_memory_references!(references),
..Default::default()
}
}
Instruction::Halt => Default::default(),
Instruction::Jump(Jump { target: _ }) => Default::default(),
Instruction::JumpWhen(JumpWhen {
target: _,
condition,
})
| Instruction::JumpUnless(JumpUnless {
target: _,
condition,
}) => MemoryAccesses {
reads: set_from_memory_references!(vec![condition]),
..Default::default()
},
Instruction::Label(Label(_)) => Default::default(),
Instruction::Load(Load {
destination,
source,
offset,
}) => MemoryAccesses {
writes: set_from_memory_references![vec![destination]],
reads: set_from_reference_vec![vec![source, &offset.name]],
..Default::default()
},
Instruction::Measurement(Measurement { target, .. }) => MemoryAccesses {
captures: set_from_optional_memory_reference!(target.as_ref()),
..Default::default()
},
Instruction::Pragma(_) => Default::default(),
Instruction::Pulse(Pulse { waveform, .. }) => MemoryAccesses {
reads: set_from_memory_references![waveform.get_memory_references()],
..Default::default()
},
Instruction::RawCapture(RawCapture {
duration,
memory_reference,
..
}) => MemoryAccesses {
reads: set_from_memory_references![duration.get_memory_references()],
captures: set_from_memory_references![vec![memory_reference]],
..Default::default()
},
Instruction::Reset(_) => Default::default(),
Instruction::SetFrequency(_) => Default::default(),
Instruction::SetPhase(SetPhase { phase: expr, .. })
| Instruction::SetScale(SetScale { scale: expr, .. })
| Instruction::ShiftPhase(ShiftPhase { phase: expr, .. }) => MemoryAccesses {
reads: set_from_memory_references!(expr.get_memory_references()),
..Default::default()
},
Instruction::ShiftFrequency(_) => Default::default(),
Instruction::Store(Store {
destination,
offset,
source,
}) => MemoryAccesses {
reads: merge_sets![
set_from_memory_references!(vec![offset]),
set_from_optional_memory_reference!(source.get_memory_reference())
],
writes: set_from_reference_vec![vec![destination]],
..Default::default()
},
Instruction::SwapPhases(_) => Default::default(),
Instruction::WaveformDefinition(_) => Default::default(),
}
}
}
impl ArithmeticOperand {
pub fn get_memory_reference(&self) -> Option<&MemoryReference> {
match self {
ArithmeticOperand::LiteralInteger(_) => None,
ArithmeticOperand::LiteralReal(_) => None,
ArithmeticOperand::MemoryReference(reference) => Some(reference),
}
}
}
impl Expression {
/// Return, if any, the memory references contained within this Expression.
    pub fn get_memory_references(&self) -> Vec<&MemoryReference> {
        match self {
            Expression::Address(reference) => vec![reference],
            Expression::FunctionCall { expression, .. } => expression.get_memory_references(),
            Expression::Infix { left, right, .. } => {
                let mut result = left.get_memory_references();
                result.extend(right.get_memory_references());
                result
            }
            Expression::Number(_) => vec![],
            Expression::PiConstant => vec![],
            Expression::Prefix { expression, .. } => expression.get_memory_references(),
            Expression::Variable(_) => vec![],
        }
    }
}
impl WaveformInvocation {
    /// Return, if any, the memory references contained within this WaveformInvocation.
    pub fn get_memory_references(&self) -> Vec<&MemoryReference> {
        let mut result = vec![];
        for expression in self.parameters.values() {
            result.extend(expression.get_memory_references());
        }
        result
    }
}
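// Illustrative sketch, not part of the original file: exercises
// `Instruction::get_memory_accesses` for a classical MOVE. The field names on
// `MemoryReference` (`name`, `index`) are assumed from their usage above.
#[cfg(test)]
mod get_memory_accesses_sketch {
    use super::*;

    #[test]
    fn move_reads_source_and_writes_destination() {
        let mv = Instruction::Move(Move {
            destination: ArithmeticOperand::MemoryReference(MemoryReference {
                name: "dest".to_string(),
                index: 0,
            }),
            source: ArithmeticOperand::MemoryReference(MemoryReference {
                name: "src".to_string(),
                index: 0,
            }),
        });
        let accesses = mv.get_memory_accesses();
        // MOVE writes its destination, reads its source, and captures nothing.
        assert!(accesses.writes.contains("dest"));
        assert!(accesses.reads.contains("src"));
        assert!(accesses.captures.is_empty());
    }
}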
common.py
import datetime
from pydantic import BaseModel, Field, validator
class DateTimeModelMixin(BaseModel):
created_at: datetime.datetime = None
updated_at: datetime.datetime = None
@validator('created_at', 'updated_at', pre=True)
def default_datetime(
        cls,
        value: datetime.datetime,
) -> datetime.datetime:
return value or datetime.datetime.now()
class IDModelMixin(BaseModel):
    id_: int = Field(0, alias='id')
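# Usage sketch (hypothetical model): combining both mixins yields a schema with
# an `id` field (populated through the `id_` alias) and timestamps that fall
# back to "now" when explicitly passed as None. Note the validator is pre=True
# but not always=True, so it only runs for values that are actually supplied.
#
#     class Article(DateTimeModelMixin, IDModelMixin):
#         title: str
#
#     Article(id=1, title='hi', created_at=None).created_at  # -> datetime.datetime.now()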
InstrutorDAOJDBC.rs
dao.JDBC.InstrutorDAOJDBC
group_dynamic_member.py
########
# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cloudify import ctx
from cloudify.decorators import operation
import cloudify_nsx.library.nsx_security_group as nsx_security_group
import cloudify_nsx.library.nsx_common as common
@operation
def create(**kwargs):
kwargs = common.get_properties_update(
'dynamic_member', "security_group_id", kwargs,
target_relationship="cloudify.nsx.relationships.contained_in",
target_property="resource_id"
)
validation_rules = {
"security_group_id": {
"required": True
},
# dynamic member definition
"dynamic_set": {
"required": True
}
}
use_existing, dynamic_member = common.get_properties_and_validate(
'dynamic_member', kwargs, validation_rules
)
resource_id = ctx.instance.runtime_properties.get('resource_id')
if resource_id:
ctx.logger.info("Reused %s" % resource_id)
return
# credentials
client_session = common.nsx_login(kwargs)
resource_id = nsx_security_group.set_dynamic_member(
client_session,
dynamic_member['security_group_id'],
dynamic_member['dynamic_set']
)
ctx.instance.runtime_properties['resource_id'] = resource_id
ctx.logger.info("created %s" % (
resource_id
))
@operation
def delete(**kwargs):
    use_existing, dynamic_member = common.get_properties(
        'dynamic_member', kwargs
    )
    if use_existing:
        common.remove_properties('dynamic_member')
        ctx.logger.info("Using existing, nothing to delete")
        return
    resource_id = ctx.instance.runtime_properties.get('resource_id')
    if not resource_id:
        common.remove_properties('dynamic_member')
        ctx.logger.info("Not fully created, skipping")
        return
    # credentials
    client_session = common.nsx_login(kwargs)
    common.attempt_with_rerun(
        nsx_security_group.del_dynamic_member,
        client_session=client_session,
        security_group_id=resource_id
    )
    ctx.logger.info("deleted %s" % resource_id)
    common.remove_properties('dynamic_member')
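# Illustrative node properties for create() above (values hypothetical). Both
# keys are required by validation_rules; security_group_id may instead be
# resolved through the cloudify.nsx.relationships.contained_in relationship:
#
#   dynamic_member:
#     security_group_id: securitygroup-10
#     dynamic_set: <membership criteria forwarded to set_dynamic_member>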
initial.rs
use std::collections::HashSet;
use crate::*;
#[derive(Debug)]
pub struct Day01Initial<'a> {
input: &'a str,
}
impl<'a> AoC<'a> for Day01Initial<'a> {
type SolutionPart1 = Day01SolutionPart1;
type SolutionPart2 = Day01SolutionPart2;
fn description(&self) -> &'static str {
"Parse string dynamically"
}
fn new(input: &'a str) -> Day01Initial<'_> {
Day01Initial { input }
}
fn solution_part1(&self) -> Self::SolutionPart1 {
const SUM_TARGET: u64 = 2020;
// We search "left*right" such that "left + right == 2020"
// Store the whole input
let entries: Vec<_> = parse_input(self.input).collect();
let (left, right) = solution_part1(SUM_TARGET, &entries).expect("At least one pair");
left.0 * right.0
}
fn solution_part2(&self) -> Self::SolutionPart2 {
const SUM_TARGET: u64 = 2020;
// We search "left*middle*right" such that "left + middle + right == 2020"
// Store the whole input
let entries: Vec<_> = parse_input(self.input).collect();
for middle in &entries {
let target = SUM_TARGET - middle.0;
match solution_part1(target, &entries) {
None => continue,
Some((left, right)) => return left.0 * middle.0 * right.0,
}
}
0
}
}
fn solution_part1(sum_target: u64, entries: &[Day01Entry]) -> Option<(Day01Entry, Day01Entry)> {
// Copy into a hashset to test presence
let entries_set: HashSet<_> = entries.iter().collect();
// Find the presence in the hashset of the difference between the target number
// and the elements.
entries
.iter()
.filter_map(|e| {
// Use a filter_map to prevent underflow
sum_target.checked_sub(e.0).map(Day01Entry)
})
.find(|diff| entries_set.contains(diff))
.map(|left| {
// Now that we found the left element, the right one is the difference with the target
let right = Day01Entry(sum_target - left.0);
assert_eq!(left.0 + right.0, sum_target);
(left, right)
})
}
#[cfg(test)]
mod tests {
mod part1 {
mod solution {
use super::super::super::*;
use crate::{tests::init_logger, AoC, PUZZLE_INPUT};
#[test]
fn solution() {
init_logger();
let expected = 545379;
let to_check = Day01Initial::new(PUZZLE_INPUT).solution_part1();
assert_eq!(to_check, expected);
}
}
mod given {
use super::super::super::Day01Initial;
use crate::{tests::init_logger, AoC};
#[test]
fn ex01() {
init_logger();
let expected = 514579;
let input = "1721
979
366
299
675
1456";
let to_check = Day01Initial::new(input).solution_part1();
assert_eq!(to_check, expected);
}
}
/*
mod extra {
use super::super::super::Day01Initial;
use crate::{tests::init_logger, AoC, PUZZLE_INPUT};
}
*/
}
mod part2 {
mod solution {
use super::super::super::*;
use crate::{tests::init_logger, AoC, PUZZLE_INPUT};
#[test]
fn solution() {
init_logger();
let expected = 257778836;
let to_check = Day01Initial::new(PUZZLE_INPUT).solution_part2();
assert_eq!(to_check, expected);
}
}
mod given {
use super::super::super::Day01Initial;
use crate::{tests::init_logger, AoC};
#[test]
fn ex01() {
init_logger();
let expected = 241861950;
let input = "1721
979
366
299
675
1456";
let to_check = Day01Initial::new(input).solution_part2();
assert_eq!(to_check, expected);
}
}
/*
mod extra {
use super::super::super::Day01Initial;
use crate::{tests::init_logger, AoC, PUZZLE_INPUT};
}
*/
}
}
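// Worked example, taken from the `given` tests above: for the entries
// [1721, 979, 366, 299, 675, 1456] and target 2020, part 1 finds
// 1721 + 299 == 2020, so the answer is 1721 * 299 = 514579; part 2 finds
// 979 + 366 + 675 == 2020, so the answer is 979 * 366 * 675 = 241861950.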
ze_generated_example_iothubresource_client_test.go
//go:build go1.18
// +build go1.18
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
package armiothub_test
import (
"context"
"log"
"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
"github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/iothub/armiothub"
)
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_get.json
func ExampleResourceClient_Get() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
res, err := client.Get(ctx,
"myResourceGroup",
"testHub",
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
// TODO: use response item
_ = res
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_createOrUpdate.json
func ExampleResourceClient_BeginCreateOrUpdate() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
poller, err := client.BeginCreateOrUpdate(ctx,
"myResourceGroup",
"testHub",
armiothub.Description{
Location: to.Ptr("centraluseuap"),
Tags: map[string]*string{},
Etag: to.Ptr("AAAAAAFD6M4="),
Properties: &armiothub.Properties{
CloudToDevice: &armiothub.CloudToDeviceProperties{
DefaultTTLAsIso8601: to.Ptr("PT1H"),
Feedback: &armiothub.FeedbackProperties{
LockDurationAsIso8601: to.Ptr("PT1M"),
MaxDeliveryCount: to.Ptr[int32](10),
TTLAsIso8601: to.Ptr("PT1H"),
},
MaxDeliveryCount: to.Ptr[int32](10),
},
EnableDataResidency: to.Ptr(true),
EnableFileUploadNotifications: to.Ptr(false),
EventHubEndpoints: map[string]*armiothub.EventHubProperties{
"events": {
PartitionCount: to.Ptr[int32](2),
RetentionTimeInDays: to.Ptr[int64](1),
},
},
Features: to.Ptr(armiothub.CapabilitiesNone),
IPFilterRules: []*armiothub.IPFilterRule{},
MessagingEndpoints: map[string]*armiothub.MessagingEndpointProperties{
"fileNotifications": {
LockDurationAsIso8601: to.Ptr("PT1M"),
MaxDeliveryCount: to.Ptr[int32](10),
TTLAsIso8601: to.Ptr("PT1H"),
},
},
MinTLSVersion: to.Ptr("1.2"),
NetworkRuleSets: &armiothub.NetworkRuleSetProperties{
ApplyToBuiltInEventHubEndpoint: to.Ptr(true),
DefaultAction: to.Ptr(armiothub.DefaultActionDeny),
IPRules: []*armiothub.NetworkRuleSetIPRule{
{
Action: to.Ptr(armiothub.NetworkRuleIPActionAllow),
FilterName: to.Ptr("rule1"),
IPMask: to.Ptr("131.117.159.53"),
},
{
Action: to.Ptr(armiothub.NetworkRuleIPActionAllow),
FilterName: to.Ptr("rule2"),
IPMask: to.Ptr("157.55.59.128/25"),
}},
},
Routing: &armiothub.RoutingProperties{
Endpoints: &armiothub.RoutingEndpoints{
EventHubs: []*armiothub.RoutingEventHubProperties{},
ServiceBusQueues: []*armiothub.RoutingServiceBusQueueEndpointProperties{},
ServiceBusTopics: []*armiothub.RoutingServiceBusTopicEndpointProperties{},
StorageContainers: []*armiothub.RoutingStorageContainerProperties{},
},
FallbackRoute: &armiothub.FallbackRouteProperties{
Name: to.Ptr("$fallback"),
Condition: to.Ptr("true"),
EndpointNames: []*string{
to.Ptr("events")},
IsEnabled: to.Ptr(true),
Source: to.Ptr(armiothub.RoutingSourceDeviceMessages),
},
Routes: []*armiothub.RouteProperties{},
},
StorageEndpoints: map[string]*armiothub.StorageEndpointProperties{
"$default": {
ConnectionString: to.Ptr(""),
ContainerName: to.Ptr(""),
SasTTLAsIso8601: to.Ptr("PT1H"),
},
},
},
SKU: &armiothub.SKUInfo{
Name: to.Ptr(armiothub.IotHubSKUS1),
Capacity: to.Ptr[int64](1),
},
},
&armiothub.ResourceClientBeginCreateOrUpdateOptions{IfMatch: nil})
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
res, err := poller.PollUntilDone(ctx, nil)
if err != nil {
log.Fatalf("failed to pull the result: %v", err)
}
// TODO: use response item
_ = res
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_patch.json
func ExampleResourceClient_BeginUpdate() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
poller, err := client.BeginUpdate(ctx,
"myResourceGroup",
"myHub",
armiothub.TagsResource{
Tags: map[string]*string{
"foo": to.Ptr("bar"),
},
},
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
res, err := poller.PollUntilDone(ctx, nil)
if err != nil {
log.Fatalf("failed to pull the result: %v", err)
}
// TODO: use response item
_ = res
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_delete.json
func ExampleResourceClient_BeginDelete() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
poller, err := client.BeginDelete(ctx,
"myResourceGroup",
"testHub",
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
res, err := poller.PollUntilDone(ctx, nil)
if err != nil {
log.Fatalf("failed to pull the result: %v", err)
}
// TODO: use response item
_ = res
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_listbysubscription.json
func ExampleResourceClient_NewListBySubscriptionPager() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
pager := client.NewListBySubscriptionPager(nil)
for pager.More() {
nextResult, err := pager.NextPage(ctx)
if err != nil {
log.Fatalf("failed to advance page: %v", err)
}
for _, v := range nextResult.Value {
// TODO: use page item
_ = v
}
}
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_listbyrg.json
func ExampleResourceClient_NewListByResourceGroupPager() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
pager := client.NewListByResourceGroupPager("myResourceGroup",
nil)
for pager.More() {
nextResult, err := pager.NextPage(ctx)
if err != nil {
log.Fatalf("failed to advance page: %v", err)
}
for _, v := range nextResult.Value {
// TODO: use page item
_ = v
}
}
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_stats.json
func ExampleResourceClient_GetStats() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
res, err := client.GetStats(ctx,
"myResourceGroup",
"testHub",
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
// TODO: use response item
_ = res
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_listehgroups.json
func ExampleResourceClient_NewListEventHubConsumerGroupsPager() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
pager := client.NewListEventHubConsumerGroupsPager("myResourceGroup",
"testHub",
"events",
nil)
for pager.More() {
nextResult, err := pager.NextPage(ctx)
if err != nil {
log.Fatalf("failed to advance page: %v", err)
}
for _, v := range nextResult.Value {
// TODO: use page item
_ = v
}
}
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_getconsumergroup.json
func ExampleResourceClient_GetEventHubConsumerGroup() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
res, err := client.GetEventHubConsumerGroup(ctx,
"myResourceGroup",
"testHub",
"events",
"test",
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
// TODO: use response item
_ = res
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_createconsumergroup.json
func ExampleResourceClient_CreateEventHubConsumerGroup() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
res, err := client.CreateEventHubConsumerGroup(ctx,
"myResourceGroup",
"testHub",
"events",
"test",
armiothub.EventHubConsumerGroupBodyDescription{
Properties: &armiothub.EventHubConsumerGroupName{
Name: to.Ptr("test"),
},
},
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
// TODO: use response item
_ = res
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_deleteconsumergroup.json
func ExampleResourceClient_DeleteEventHubConsumerGroup() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
	if err != nil {
		log.Fatalf("failed to create client: %v", err)
	}
_, err = client.DeleteEventHubConsumerGroup(ctx,
"myResourceGroup",
"testHub",
"events",
"test",
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_listjobs.json
func ExampleResourceClient_NewListJobsPager() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
pager := client.NewListJobsPager("myResourceGroup",
"testHub",
nil)
for pager.More() {
nextResult, err := pager.NextPage(ctx)
if err != nil {
log.Fatalf("failed to advance page: %v", err)
}
for _, v := range nextResult.Value {
// TODO: use page item
_ = v
}
}
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_getjob.json
func ExampleResourceClient_GetJob() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
res, err := client.GetJob(ctx,
"myResourceGroup",
"testHub",
"test",
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
// TODO: use response item
_ = res
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_quotametrics.json
func ExampleResourceClient_NewGetQuotaMetricsPager() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
pager := client.NewGetQuotaMetricsPager("myResourceGroup",
"testHub",
nil)
for pager.More() {
nextResult, err := pager.NextPage(ctx)
if err != nil {
log.Fatalf("failed to advance page: %v", err)
}
for _, v := range nextResult.Value {
// TODO: use page item
_ = v
}
}
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_routingendpointhealth.json
func ExampleResourceClient_NewGetEndpointHealthPager() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
pager := client.NewGetEndpointHealthPager("myResourceGroup",
"testHub",
nil)
for pager.More() {
nextResult, err := pager.NextPage(ctx)
if err != nil {
log.Fatalf("failed to advance page: %v", err)
}
for _, v := range nextResult.Value {
// TODO: use page item
_ = v
}
}
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/checkNameAvailability.json
func ExampleResourceClient_CheckNameAvailability() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
res, err := client.CheckNameAvailability(ctx,
armiothub.OperationInputs{
Name: to.Ptr("test-request"),
},
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
// TODO: use response item
_ = res
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_testallroutes.json
func ExampleResourceClient_TestAllRoutes() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
res, err := client.TestAllRoutes(ctx,
"testHub",
"myResourceGroup",
armiothub.TestAllRoutesInput{
Message: &armiothub.RoutingMessage{
AppProperties: map[string]*string{
"key1": to.Ptr("value1"),
},
Body: to.Ptr("Body of message"),
SystemProperties: map[string]*string{
"key1": to.Ptr("value1"),
},
},
RoutingSource: to.Ptr(armiothub.RoutingSourceDeviceMessages),
},
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
// TODO: use response item
_ = res
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_testnewroute.json
func ExampleResourceClient_TestRoute() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
res, err := client.TestRoute(ctx,
"testHub",
"myResourceGroup",
armiothub.TestRouteInput{
Message: &armiothub.RoutingMessage{
AppProperties: map[string]*string{
"key1": to.Ptr("value1"),
},
Body: to.Ptr("Body of message"),
SystemProperties: map[string]*string{
"key1": to.Ptr("value1"),
},
},
Route: &armiothub.RouteProperties{
Name: to.Ptr("Routeid"),
EndpointNames: []*string{
to.Ptr("id1")},
IsEnabled: to.Ptr(true),
Source: to.Ptr(armiothub.RoutingSourceDeviceMessages),
},
},
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
// TODO: use response item
_ = res
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_listkeys.json
func ExampleResourceClient_NewListKeysPager() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
pager := client.NewListKeysPager("myResourceGroup",
"testHub",
nil)
for pager.More() {
nextResult, err := pager.NextPage(ctx)
if err != nil {
log.Fatalf("failed to advance page: %v", err)
}
for _, v := range nextResult.Value {
// TODO: use page item
_ = v
}
}
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_getkey.json
func ExampleResourceClient_GetKeysForKeyName() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
res, err := client.GetKeysForKeyName(ctx,
"myResourceGroup",
"testHub",
"iothubowner",
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
// TODO: use response item
_ = res
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_exportdevices.json
func ExampleResourceClient_ExportDevices() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
res, err := client.ExportDevices(ctx,
"myResourceGroup",
"testHub",
armiothub.ExportDevicesRequest{
ExcludeKeys: to.Ptr(true),
ExportBlobContainerURI: to.Ptr("testBlob"),
},
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
// TODO: use response item
_ = res
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/iothub/resource-manager/Microsoft.Devices/stable/2021-07-02/examples/iothub_importdevices.json
func ExampleResourceClient_ImportDevices() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armiothub.NewResourceClient("91d12660-3dec-467a-be2a-213b5544ddc0", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
res, err := client.ImportDevices(ctx,
"myResourceGroup",
"testHub",
armiothub.ImportDevicesRequest{
InputBlobContainerURI: to.Ptr("testBlob"),
OutputBlobContainerURI: to.Ptr("testBlob"),
},
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
// TODO: use response item
_ = res
}
index.iife.js
;var VueDemi = (function (VueDemi, Vue, VueCompositionAPI) {
if (VueDemi.install) {
return VueDemi
}
if (Vue) {
if (Vue.version.slice(0, 2) === '2.') {
if (VueCompositionAPI) {
for (var key in VueCompositionAPI) {
VueDemi[key] = VueCompositionAPI[key]
}
VueDemi.isVue2 = true
VueDemi.isVue3 = false
VueDemi.install = function (){}
VueDemi.Vue = Vue
VueDemi.Vue2 = Vue
VueDemi.version = Vue.version
} else {
console.error(
'[vue-demi] no VueCompositionAPI instance found, please be sure to import `@vue/composition-api` before `vue-demi`.'
)
}
} else if (Vue.version.slice(0, 2) === '3.') {
for (var key in Vue) {
VueDemi[key] = Vue[key]
}
VueDemi.isVue2 = false
VueDemi.isVue3 = true
VueDemi.install = function (){}
VueDemi.Vue = Vue
VueDemi.Vue2 = undefined
VueDemi.version = Vue.version
VueDemi.set = function(target, key, val) {
if (Array.isArray(target)) {
target.length = Math.max(target.length, key)
target.splice(key, 1, val)
return val
}
target[key] = val
return val
}
VueDemi.del = function(target, key) {
if (Array.isArray(target)) {
target.splice(key, 1)
return
}
delete target[key]
}
} else {
console.error('[vue-demi] Vue version ' + Vue.version + ' is unsupported.')
}
} else {
console.error(
'[vue-demi] no Vue instance found, please be sure to import `vue` before `vue-demi`.'
)
}
return VueDemi
})(
this.VueDemi = this.VueDemi || (typeof VueDemi !== "undefined" ? VueDemi : {}),
this.Vue || (typeof Vue !== "undefined" ? Vue : undefined),
this.VueCompositionAPI || (typeof VueCompositionAPI !== "undefined" ? VueCompositionAPI : undefined)
);
;
;(function (exports, shared, vueDemi, core) {
'use strict';
function computedAsync(evaluationCallback, initialState, optionsOrRef) {
let options;
if (vueDemi.isRef(optionsOrRef)) {
options = {
evaluating: optionsOrRef
};
} else {
options = optionsOrRef || {};
}
const {
lazy = false,
evaluating = void 0,
onError = shared.noop
} = options;
const started = vueDemi.ref(!lazy);
const current = vueDemi.ref(initialState);
let counter = 0;
vueDemi.watchEffect(async (onInvalidate) => {
if (!started.value)
return;
counter++;
const counterAtBeginning = counter;
let hasFinished = false;
if (evaluating) {
Promise.resolve().then(() => {
evaluating.value = true;
});
}
try {
const result = await evaluationCallback((cancelCallback) => {
onInvalidate(() => {
if (evaluating)
evaluating.value = false;
if (!hasFinished)
cancelCallback();
});
});
if (counterAtBeginning === counter)
current.value = result;
} catch (e) {
onError(e);
} finally {
if (evaluating)
evaluating.value = false;
hasFinished = true;
}
});
if (lazy) {
return vueDemi.computed(() => {
started.value = true;
return current.value;
});
} else {
return current;
}
}
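// Usage sketch (comments only; `fetchUser` and `id` are hypothetical):
//   const evaluating = ref(false)
//   const userName = computedAsync(
//     async () => (await fetchUser(id.value)).name, // re-evaluated when deps change
//     'anonymous',                                  // initial state
//     evaluating,                                   // or { lazy: true, onError }
//   )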
function computedInject(key, options, defaultSource, treatDefaultAsFactory) {
let source = vueDemi.inject(key);
if (defaultSource)
source = vueDemi.inject(key, defaultSource);
if (treatDefaultAsFactory)
source = vueDemi.inject(key, defaultSource, treatDefaultAsFactory);
if (typeof options === "function") {
return vueDemi.computed((ctx) => options(source, ctx));
} else {
return vueDemi.computed({
get: (ctx) => options.get(source, ctx),
set: options.set
});
}
}
const createUnrefFn = (fn) => {
return function(...args) {
return fn.apply(this, args.map((i) => vueDemi.unref(i)));
};
};
function unrefElement(elRef) {
var _a;
const plain = vueDemi.unref(elRef);
return (_a = plain == null ? void 0 : plain.$el) != null ? _a : plain;
}
const defaultWindow = shared.isClient ? window : void 0;
const defaultDocument = shared.isClient ? window.document : void 0;
const defaultNavigator = shared.isClient ? window.navigator : void 0;
const defaultLocation = shared.isClient ? window.location : void 0;
function useEventListener(...args) {
let target;
let event;
let listener;
let options;
if (shared.isString(args[0])) {
[event, listener, options] = args;
target = defaultWindow;
} else {
[target, event, listener, options] = args;
}
if (!target)
return shared.noop;
let cleanup = shared.noop;
const stopWatch = vueDemi.watch(() => unrefElement(target), (el) => {
cleanup();
if (!el)
return;
el.addEventListener(event, listener, options);
cleanup = () => {
el.removeEventListener(event, listener, options);
cleanup = shared.noop;
};
}, { immediate: true, flush: "post" });
const stop = () => {
stopWatch();
cleanup();
};
shared.tryOnScopeDispose(stop);
return stop;
}
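// Usage sketch: omit the target to listen on the window, or pass an
// element/component ref as the first argument.
//   const stop = useEventListener('keydown', (e) => console.log(e.key))
//   useEventListener(elRef, 'click', onClick, { passive: true })
// The returned function removes the listener and stops the target watcher;
// tryOnScopeDispose does the same when the enclosing effect scope is disposed.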
function onClickOutside(target, handler, options = {}) {
const { window = defaultWindow, ignore, capture = true } = options;
if (!window)
return;
const shouldListen = vueDemi.ref(true);
const listener = (event) => {
const el = unrefElement(target);
const composedPath = event.composedPath();
if (!el || el === event.target || composedPath.includes(el) || !shouldListen.value)
return;
if (ignore && ignore.length > 0) {
if (ignore.some((target2) => {
const el2 = unrefElement(target2);
return el2 && (event.target === el2 || composedPath.includes(el2));
}))
return;
}
handler(event);
};
const cleanup = [
useEventListener(window, "click", listener, { passive: true, capture }),
useEventListener(window, "pointerdown", (e) => {
const el = unrefElement(target);
shouldListen.value = !!el && !e.composedPath().includes(el);
}, { passive: true })
];
const stop = () => cleanup.forEach((fn) => fn());
return stop;
}
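// Usage sketch (`modalRef`, `close`, and `triggerRef` are hypothetical):
//   onClickOutside(modalRef, () => close(), { ignore: [triggerRef] })
// The handler fires only for clicks whose composed path includes neither the
// target element nor any element listed in `ignore`.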
var __defProp$h = Object.defineProperty;
var __defProps$8 = Object.defineProperties;
var __getOwnPropDescs$8 = Object.getOwnPropertyDescriptors;
var __getOwnPropSymbols$j = Object.getOwnPropertySymbols;
var __hasOwnProp$j = Object.prototype.hasOwnProperty;
var __propIsEnum$j = Object.prototype.propertyIsEnumerable;
var __defNormalProp$h = (obj, key, value) => key in obj ? __defProp$h(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues$h = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp$j.call(b, prop))
__defNormalProp$h(a, prop, b[prop]);
if (__getOwnPropSymbols$j)
for (var prop of __getOwnPropSymbols$j(b)) {
if (__propIsEnum$j.call(b, prop))
__defNormalProp$h(a, prop, b[prop]);
}
return a;
};
var __spreadProps$8 = (a, b) => __defProps$8(a, __getOwnPropDescs$8(b));
const createKeyPredicate = (keyFilter) => {
if (typeof keyFilter === "function")
return keyFilter;
else if (typeof keyFilter === "string")
return (event) => event.key === keyFilter;
else if (Array.isArray(keyFilter))
return (event) => keyFilter.includes(event.key);
else if (keyFilter)
return () => true;
else
return () => false;
};
function onKeyStroke(key, handler, options = {}) {
const { target = defaultWindow, eventName = "keydown", passive = false } = options;
const predicate = createKeyPredicate(key);
const listener = (e) => {
if (predicate(e))
handler(e);
};
return useEventListener(target, eventName, listener, passive);
}
function onKeyDown(key, handler, options = {}) {
return onKeyStroke(key, handler, __spreadProps$8(__spreadValues$h({}, options), { eventName: "keydown" }));
}
function onKeyPressed(key, handler, options = {}) {
return onKeyStroke(key, handler, __spreadProps$8(__spreadValues$h({}, options), { eventName: "keypress" }));
}
function onKeyUp(key, handler, options = {}) {
return onKeyStroke(key, handler, __spreadProps$8(__spreadValues$h({}, options), { eventName: "keyup" }));
}
const DEFAULT_DELAY = 500;
function onLongPress(target, handler, options) {
const elementRef = vueDemi.computed(() => core.unrefElement(target));
let timeout = null;
function clear() {
if (timeout != null) {
clearTimeout(timeout);
timeout = null;
}
}
function onDown(ev) {
var _a;
clear();
timeout = setTimeout(() => handler(ev), (_a = options == null ? void 0 : options.delay) != null ? _a : DEFAULT_DELAY);
}
core.useEventListener(elementRef, "pointerdown", onDown);
core.useEventListener(elementRef, "pointerup", clear);
core.useEventListener(elementRef, "pointerleave", clear);
}
const isFocusedElementEditable = () => {
const { activeElement, body } = document;
if (!activeElement)
return false;
if (activeElement === body)
return false;
switch (activeElement.tagName) {
case "INPUT":
case "TEXTAREA":
return true;
}
return activeElement.hasAttribute("contenteditable");
};
const isTypedCharValid = ({
keyCode,
metaKey,
ctrlKey,
altKey
}) => {
if (metaKey || ctrlKey || altKey)
return false;
if (keyCode >= 48 && keyCode <= 57 || keyCode >= 96 && keyCode <= 105)
return true;
if (keyCode >= 65 && keyCode <= 90)
return true;
return false;
};
function onStartTyping(callback, options = {}) {
const { document: document2 = defaultDocument } = options;
const keydown = (event) => {
!isFocusedElementEditable() && isTypedCharValid(event) && callback(event);
};
if (document2)
useEventListener(document2, "keydown", keydown, { passive: true });
}
function templateRef(key, initialValue = null) {
const instance = vueDemi.getCurrentInstance();
let _trigger = () => {
};
const element = vueDemi.customRef((track, trigger) => {
_trigger = trigger;
return {
get() {
var _a, _b;
track();
return (_b = (_a = instance == null ? void 0 : instance.proxy) == null ? void 0 : _a.$refs[key]) != null ? _b : initialValue;
},
set() {
}
};
});
shared.tryOnMounted(_trigger);
vueDemi.onUpdated(_trigger);
return element;
}
function useActiveElement(options = {}) {
const { window = defaultWindow } = options;
const counter = vueDemi.ref(0);
if (window) {
useEventListener(window, "blur", () => counter.value += 1, true);
useEventListener(window, "focus", () => counter.value += 1, true);
}
return vueDemi.computed(() => {
counter.value;
return window == null ? void 0 : window.document.activeElement;
});
}
function useAsyncQueue(tasks, options = {}) {
const {
interrupt = true,
onError = shared.noop,
onFinished = shared.noop
} = options;
const promiseState = {
pending: "pending",
rejected: "rejected",
fulfilled: "fulfilled"
};
const initialResult = Array.from(new Array(tasks.length), () => ({ state: promiseState.pending, data: null }));
const result = vueDemi.reactive(initialResult);
const activeIndex = vueDemi.ref(-1);
if (!tasks || tasks.length === 0) {
onFinished();
return {
activeIndex,
result
};
}
function updateResult(state, res) {
activeIndex.value++;
result[activeIndex.value].data = res;
result[activeIndex.value].state = state;
}
tasks.reduce((prev, curr) => {
return prev.then((prevRes) => {
var _a;
if (((_a = result[activeIndex.value]) == null ? void 0 : _a.state) === promiseState.rejected && interrupt) {
onFinished();
return;
}
return curr(prevRes).then((currentRes) => {
updateResult(promiseState.fulfilled, currentRes);
activeIndex.value === tasks.length - 1 && onFinished();
return currentRes;
});
}).catch((e) => {
updateResult(promiseState.rejected, e);
onError();
return e;
});
}, Promise.resolve());
return {
activeIndex,
result
};
}
function useAsyncState(promise, initialState, options) {
const {
immediate = true,
delay = 0,
onError = shared.noop,
resetOnExecute = true,
shallow = true
} = options != null ? options : {};
const state = shallow ? vueDemi.shallowRef(initialState) : vueDemi.ref(initialState);
const isReady = vueDemi.ref(false);
const isLoading = vueDemi.ref(false);
const error = vueDemi.ref(void 0);
async function execute(delay2 = 0, ...args) {
if (resetOnExecute)
state.value = initialState;
error.value = void 0;
isReady.value = false;
isLoading.value = true;
if (delay2 > 0)
await shared.promiseTimeout(delay2);
const _promise = typeof promise === "function" ? promise(...args) : promise;
try {
const data = await _promise;
state.value = data;
isReady.value = true;
} catch (e) {
error.value = e;
onError(e);
}
isLoading.value = false;
return state.value;
}
if (immediate)
execute(delay);
return {
state,
isReady,
isLoading,
error,
execute
};
}
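// Usage sketch (URL hypothetical):
//   const { state, isLoading, error, execute } = useAsyncState(
//     () => fetch('/api/user').then((r) => r.json()),
//     null,
//     { immediate: false },
//   )
//   execute() // or execute(delayMs, ...args) - extra args reach the factory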
function useBase64(target, options) {
const base64 = vueDemi.ref("");
const promise = vueDemi.ref();
function execute() {
if (!shared.isClient)
return;
promise.value = new Promise((resolve, reject) => {
try {
const _target = vueDemi.unref(target);
if (_target === void 0 || _target === null) {
resolve("");
} else if (typeof _target === "string") {
resolve(blobToBase64(new Blob([_target], { type: "text/plain" })));
} else if (_target instanceof Blob) {
resolve(blobToBase64(_target));
} else if (_target instanceof ArrayBuffer) {
resolve(window.btoa(String.fromCharCode(...new Uint8Array(_target))));
} else if (_target instanceof HTMLCanvasElement) {
resolve(_target.toDataURL(options == null ? void 0 : options.type, options == null ? void 0 : options.quality));
} else if (_target instanceof HTMLImageElement) {
const img = _target.cloneNode(false);
img.crossOrigin = "Anonymous";
imgLoaded(img).then(() => {
const canvas = document.createElement("canvas");
const ctx = canvas.getContext("2d");
canvas.width = img.width;
canvas.height = img.height;
ctx.drawImage(img, 0, 0, canvas.width, canvas.height);
resolve(canvas.toDataURL(options == null ? void 0 : options.type, options == null ? void 0 : options.quality));
}).catch(reject);
} else {
reject(new Error("target is unsupported types"));
}
} catch (error) {
reject(error);
}
});
promise.value.then((res) => base64.value = res);
return promise.value;
}
vueDemi.watch(target, execute, { immediate: true });
return {
base64,
promise,
execute
};
}
function imgLoaded(img) {
return new Promise((resolve, reject) => {
if (!img.complete) {
img.onload = () => {
resolve();
};
img.onerror = reject;
} else {
resolve();
}
});
}
function blobToBase64(blob) {
return new Promise((resolve, reject) => {
const fr = new FileReader();
fr.onload = (e) => {
resolve(e.target.result);
};
fr.onerror = reject;
fr.readAsDataURL(blob);
});
}
function useBattery({ navigator = defaultNavigator } = {}) {
const events = ["chargingchange", "chargingtimechange", "dischargingtimechange", "levelchange"];
const isSupported = navigator && "getBattery" in navigator;
const charging = vueDemi.ref(false);
const chargingTime = vueDemi.ref(0);
const dischargingTime = vueDemi.ref(0);
const level = vueDemi.ref(1);
let battery;
function updateBatteryInfo() {
charging.value = this.charging;
chargingTime.value = this.chargingTime || 0;
dischargingTime.value = this.dischargingTime || 0;
level.value = this.level;
}
if (isSupported) {
navigator.getBattery().then((_battery) => {
battery = _battery;
updateBatteryInfo.call(battery);
for (const event of events)
useEventListener(battery, event, updateBatteryInfo, { passive: true });
});
}
return {
isSupported,
charging,
chargingTime,
dischargingTime,
level
};
}
function useMediaQuery(query, options = {}) {
const { window = defaultWindow } = options;
let mediaQuery;
const matches = vueDemi.ref(false);
const update = () => {
if (!window)
return;
if (!mediaQuery)
mediaQuery = window.matchMedia(query);
matches.value = mediaQuery.matches;
};
shared.tryOnBeforeMount(() => {
update();
if (!mediaQuery)
return;
if ("addEventListener" in mediaQuery)
mediaQuery.addEventListener("change", update);
else
mediaQuery.addListener(update);
shared.tryOnScopeDispose(() => {
if ("removeEventListener" in mediaQuery)
mediaQuery.removeEventListener("change", update);
else
mediaQuery.removeListener(update);
});
});
return matches;
}
const breakpointsTailwind = {
"sm": 640,
"md": 768,
"lg": 1024,
"xl": 1280,
"2xl": 1536
};
const breakpointsBootstrapV5 = {
sm: 576,
md: 768,
lg: 992,
xl: 1200,
xxl: 1400
};
const breakpointsVuetify = {
xs: 600,
sm: 960,
md: 1264,
lg: 1904
};
const breakpointsAntDesign = {
xs: 480,
sm: 576,
md: 768,
lg: 992,
xl: 1200,
xxl: 1600
};
const breakpointsQuasar = {
xs: 600,
sm: 1024,
md: 1440,
lg: 1920
};
const breakpointsSematic = {
mobileS: 320,
mobileM: 375,
mobileL: 425,
tablet: 768,
laptop: 1024,
laptopL: 1440,
desktop4K: 2560
};
var __defProp$g = Object.defineProperty;
var __getOwnPropSymbols$i = Object.getOwnPropertySymbols;
var __hasOwnProp$i = Object.prototype.hasOwnProperty;
var __propIsEnum$i = Object.prototype.propertyIsEnumerable;
var __defNormalProp$g = (obj, key, value) => key in obj ? __defProp$g(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues$g = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp$i.call(b, prop))
__defNormalProp$g(a, prop, b[prop]);
if (__getOwnPropSymbols$i)
for (var prop of __getOwnPropSymbols$i(b)) {
if (__propIsEnum$i.call(b, prop))
__defNormalProp$g(a, prop, b[prop]);
}
return a;
};
function useBreakpoints(breakpoints, options = {}) {
function getValue(k, delta) {
let v = breakpoints[k];
if (delta != null)
v = shared.increaseWithUnit(v, delta);
if (typeof v === "number")
v = `${v}px`;
return v;
}
const { window = defaultWindow } = options;
function match(query) {
if (!window)
return false;
return window.matchMedia(query).matches;
}
const greater = (k) => {
return useMediaQuery(`(min-width: ${getValue(k)})`, options);
};
const shortcutMethods = Object.keys(breakpoints).reduce((shortcuts, k) => {
Object.defineProperty(shortcuts, k, {
get: () => greater(k),
enumerable: true,
configurable: true
});
return shortcuts;
}, {});
return __spreadValues$g({
greater,
smaller(k) {
return useMediaQuery(`(max-width: ${getValue(k, -0.1)})`, options);
},
between(a, b) {
return useMediaQuery(`(min-width: ${getValue(a)}) and (max-width: ${getValue(b, -0.1)})`, options);
},
isGreater(k) {
return match(`(min-width: ${getValue(k)})`);
},
isSmaller(k) {
return match(`(max-width: ${getValue(k, -0.1)})`);
},
isInBetween(a, b) {
return match(`(min-width: ${getValue(a)}) and (max-width: ${getValue(b, -0.1)})`);
}
}, shortcutMethods);
}
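// Usage sketch with the presets above:
//   const breakpoints = useBreakpoints(breakpointsTailwind)
//   const mdAndUp = breakpoints.greater('md')  // reactive media-query ref
//   const narrow = breakpoints.smaller('sm')   // matches (max-width: 639.9px)
//   breakpoints.isInBetween('md', 'xl')        // one-off, non-reactive check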
const useBroadcastChannel = (options) => {
const {
name,
window = defaultWindow
} = options;
const isSupported = window && "BroadcastChannel" in window;
const isClosed = vueDemi.ref(false);
const channel = vueDemi.ref();
const data = vueDemi.ref();
const error = vueDemi.ref(null);
const post = (data2) => {
if (channel.value)
channel.value.postMessage(data2);
};
const close = () => {
if (channel.value)
channel.value.close();
isClosed.value = true;
};
if (isSupported) {
shared.tryOnMounted(() => {
error.value = null;
channel.value = new BroadcastChannel(name);
channel.value.addEventListener("message", (e) => {
data.value = e.data;
}, { passive: true });
channel.value.addEventListener("messageerror", (e) => {
error.value = e;
}, { passive: true });
channel.value.addEventListener("close", () => {
isClosed.value = true;
});
});
}
shared.tryOnScopeDispose(() => {
close();
});
return {
isSupported,
channel,
data,
post,
close,
error,
isClosed
};
};
function useBrowserLocation({ window = defaultWindow } = {}) {
const buildState = (trigger) => {
const { state: state2, length } = (window == null ? void 0 : window.history) || {};
const { hash, host, hostname, href, origin, pathname, port, protocol, search } = (window == null ? void 0 : window.location) || {};
return {
trigger,
state: state2,
length,
hash,
host,
hostname,
href,
origin,
pathname,
port,
protocol,
search
};
};
const state = vueDemi.ref(buildState("load"));
if (window) {
useEventListener(window, "popstate", () => state.value = buildState("popstate"), { passive: true });
useEventListener(window, "hashchange", () => state.value = buildState("hashchange"), { passive: true });
}
return state;
}
function useCached(refValue, comparator = (a, b) => a === b, watchOptions) {
const cachedValue = vueDemi.ref(refValue.value);
vueDemi.watch(() => refValue.value, (value) => {
if (!comparator(value, cachedValue.value))
cachedValue.value = value;
}, watchOptions);
return cachedValue;
}
function useClamp(value, min, max) {
const _value = vueDemi.ref(value);
return vueDemi.computed({
get() {
return _value.value = shared.clamp(_value.value, vueDemi.unref(min), vueDemi.unref(max));
},
set(value2) {
_value.value = shared.clamp(value2, vueDemi.unref(min), vueDemi.unref(max));
}
});
}
function useClipboard(options = {}) {
const {
navigator = defaultNavigator,
read = false,
source,
copiedDuring = 1500
} = options;
const events = ["copy", "cut"];
const isSupported = Boolean(navigator && "clipboard" in navigator);
const text = vueDemi.ref("");
const copied = vueDemi.ref(false);
const timeout = shared.useTimeoutFn(() => copied.value = false, copiedDuring);
function updateText() {
navigator.clipboard.readText().then((value) => {
text.value = value;
});
}
if (isSupported && read) {
for (const event of events)
useEventListener(event, updateText);
}
async function copy(value = vueDemi.unref(source)) {
if (isSupported && value != null) {
await navigator.clipboard.writeText(value);
text.value = value;
copied.value = true;
timeout.start();
}
}
return {
isSupported,
text,
copied,
copy
};
}
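// Usage sketch:
//   const { copy, copied, text, isSupported } = useClipboard({ read: true })
//   copy('hello') // `copied` stays true for copiedDuring (1500 ms by default)
// With read: true, copy/cut events on the page refresh `text` via readText().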
const _global = typeof globalThis !== "undefined" ? globalThis : typeof window !== "undefined" ? window : typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : {};
const globalKey = "__vueuse_ssr_handlers__";
_global[globalKey] = _global[globalKey] || {};
const handlers = _global[globalKey];
function getSSRHandler(key, fallback) {
return handlers[key] || fallback;
}
function setSSRHandler(key, fn) {
handlers[key] = fn;
}
function guessSerializerType(rawInit) {
  return rawInit == null
    ? "any"
    : rawInit instanceof Set
      ? "set"
      : rawInit instanceof Map
        ? "map"
        : rawInit instanceof Date
          ? "date"
          : typeof rawInit === "boolean"
            ? "boolean"
            : typeof rawInit === "string"
              ? "string"
              : typeof rawInit === "object"
                ? "object"
                : Array.isArray(rawInit)
                  ? "object"
                  : !Number.isNaN(rawInit)
                    ? "number"
                    : "any";
}
const StorageSerializers = {
boolean: {
read: (v) => v === "true",
write: (v) => String(v)
},
object: {
read: (v) => JSON.parse(v),
write: (v) => JSON.stringify(v)
},
number: {
read: (v) => Number.parseFloat(v),
write: (v) => String(v)
},
any: {
read: (v) => v,
write: (v) => String(v)
},
string: {
read: (v) => v,
write: (v) => String(v)
},
map: {
read: (v) => new Map(JSON.parse(v)),
write: (v) => JSON.stringify(Array.from(v.entries()))
},
set: {
read: (v) => new Set(JSON.parse(v)),
    // Array.from(v) serializes the Set's values; v.entries() yields [value, value] pairs that read() cannot round-trip
    write: (v) => JSON.stringify(Array.from(v))
},
date: {
read: (v) => new Date(v),
write: (v) => v.toISOString()
}
};
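/**
 * Ref synced with a Storage backend (localStorage/sessionStorage): picks a
 * serializer from the guessed type of the initial value, writes on change,
 * and listens for cross-tab "storage" events.
 *
 * Usage sketch (key and default are illustrative):
 *   const state = useStorage("my-key", { count: 0 }, localStorage);
 *   state.value.count++;
 */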
function useStorage(key, initialValue, storage, options = {}) {
var _a;
const {
flush = "pre",
deep = true,
listenToStorageChanges = true,
writeDefaults = true,
shallow,
window = defaultWindow,
eventFilter,
onError = (e) => {
console.error(e);
}
} = options;
const data = (shallow ? vueDemi.shallowRef : vueDemi.ref)(initialValue);
if (!storage) {
try {
storage = getSSRHandler("getDefaultStorage", () => {
var _a2;
return (_a2 = defaultWindow) == null ? void 0 : _a2.localStorage;
})();
} catch (e) {
onError(e);
}
}
if (!storage)
return data;
const rawInit = vueDemi.unref(initialValue);
const type = guessSerializerType(rawInit);
const serializer = (_a = options.serializer) != null ? _a : StorageSerializers[type];
const { pause: pauseWatch, resume: resumeWatch } = shared.pausableWatch(data, () => write(data.value), { flush, deep, eventFilter });
if (window && listenToStorageChanges)
useEventListener(window, "storage", update);
update();
return data;
function write(v) {
try {
if (v == null)
storage.removeItem(key);
else
storage.setItem(key, serializer.write(v));
} catch (e) {
onError(e);
}
}
function read(event) {
if (event && event.key !== key)
return;
pauseWatch();
try {
const rawValue = event ? event.newValue : storage.getItem(key);
if (rawValue == null) {
if (writeDefaults && rawInit !== null)
storage.setItem(key, serializer.write(rawInit));
return rawInit;
} else if (typeof rawValue !== "string") {
return rawValue;
} else {
return serializer.read(rawValue);
}
} catch (e) {
onError(e);
} finally {
resumeWatch();
}
}
function update(event) {
if (event && event.key !== key)
return;
data.value = read(event);
}
}
function usePreferredDark(options) {
return useMediaQuery("(prefers-color-scheme: dark)", options);
}
var __defProp$f = Object.defineProperty;
var __getOwnPropSymbols$h = Object.getOwnPropertySymbols;
var __hasOwnProp$h = Object.prototype.hasOwnProperty;
var __propIsEnum$h = Object.prototype.propertyIsEnumerable;
var __defNormalProp$f = (obj, key, value) => key in obj ? __defProp$f(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues$f = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp$h.call(b, prop))
__defNormalProp$f(a, prop, b[prop]);
if (__getOwnPropSymbols$h)
for (var prop of __getOwnPropSymbols$h(b)) {
if (__propIsEnum$h.call(b, prop))
__defNormalProp$f(a, prop, b[prop]);
}
return a;
};
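/**
 * Reactive color mode ("auto" | "light" | "dark" | custom) persisted to
 * storage and reflected onto a DOM attribute or class via an SSR handler.
 *
 * Usage sketch (defaults target the <html> class attribute):
 *   const mode = useColorMode();
 *   mode.value = "dark";
 */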
function useColorMode(options = {}) {
const {
selector = "html",
attribute = "class",
window = defaultWindow,
storage,
storageKey = "vueuse-color-scheme",
listenToStorageChanges = true,
storageRef
} = options;
const modes = __spreadValues$f({
auto: "",
light: "light",
dark: "dark"
}, options.modes || {});
const preferredDark = usePreferredDark({ window });
const preferredMode = vueDemi.computed(() => preferredDark.value ? "dark" : "light");
const store = storageRef || (storageKey == null ? vueDemi.ref("auto") : useStorage(storageKey, "auto", storage, { window, listenToStorageChanges }));
const state = vueDemi.computed({
get() {
return store.value === "auto" ? preferredMode.value : store.value;
},
set(v) {
store.value = v;
}
});
const updateHTMLAttrs = getSSRHandler("updateHTMLAttrs", (selector2, attribute2, value) => {
const el = window == null ? void 0 : window.document.querySelector(selector2);
if (!el)
return;
if (attribute2 === "class") {
const current = value.split(/\s/g);
Object.values(modes).flatMap((i) => (i || "").split(/\s/g)).filter(Boolean).forEach((v) => {
if (current.includes(v))
el.classList.add(v);
else
el.classList.remove(v);
});
} else {
el.setAttribute(attribute2, value);
}
});
function defaultOnChanged(mode) {
var _a;
updateHTMLAttrs(selector, attribute, (_a = modes[mode]) != null ? _a : mode);
}
function onChanged(mode) {
if (options.onChanged)
options.onChanged(mode, defaultOnChanged);
else
defaultOnChanged(mode);
}
vueDemi.watch(state, onChanged, { flush: "post", immediate: true });
shared.tryOnMounted(() => onChanged(state.value));
return state;
}
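/**
 * Promise-based confirm-dialog state machine: reveal() resolves once either
 * confirm() or cancel() is called, with `isCanceled` marking the outcome.
 *
 * Usage sketch:
 *   const { reveal, confirm, cancel } = useConfirmDialog();
 *   const { isCanceled } = await reveal();
 */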
function useConfirmDialog(revealed = vueDemi.ref(false)) {
const confirmHook = shared.createEventHook();
const cancelHook = shared.createEventHook();
const revealHook = shared.createEventHook();
let _resolve = shared.noop;
const reveal = (data) => {
revealHook.trigger(data);
revealed.value = true;
return new Promise((resolve) => {
_resolve = resolve;
});
};
const confirm = (data) => {
revealed.value = false;
confirmHook.trigger(data);
_resolve({ data, isCanceled: false });
};
const cancel = (data) => {
revealed.value = false;
cancelHook.trigger(data);
_resolve({ data, isCanceled: true });
};
return {
isRevealed: vueDemi.computed(() => revealed.value),
reveal,
confirm,
cancel,
onReveal: revealHook.on,
onConfirm: confirmHook.on,
onCancel: cancelHook.on
};
}
function useCssVar(prop, target, { window = defaultWindow } = {}) {
const variable = vueDemi.ref("");
const elRef = vueDemi.computed(() => {
var _a;
return unrefElement(target) || ((_a = window == null ? void 0 : window.document) == null ? void 0 : _a.documentElement);
});
vueDemi.watch([elRef, () => vueDemi.unref(prop)], ([el, prop2]) => {
if (el && window)
variable.value = window.getComputedStyle(el).getPropertyValue(prop2);
}, { immediate: true });
vueDemi.watch(variable, (val) => {
var _a;
if ((_a = elRef.value) == null ? void 0 : _a.style)
elRef.value.style.setProperty(vueDemi.unref(prop), val);
});
return variable;
}
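/**
 * Cycles a ref through a fixed list, wrapping around at both ends.
 *
 * Usage sketch (the list is illustrative):
 *   const { state, next, prev } = useCycleList(["a", "b", "c"]);
 *   next(); // state.value === "b"
 */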
function useCycleList(list, options) {
var _a;
const state = vueDemi.shallowRef((_a = options == null ? void 0 : options.initialValue) != null ? _a : list[0]);
const index = vueDemi.computed({
get() {
var _a2;
let index2 = (options == null ? void 0 : options.getIndexOf) ? options.getIndexOf(state.value, list) : list.indexOf(state.value);
if (index2 < 0)
index2 = (_a2 = options == null ? void 0 : options.fallbackIndex) != null ? _a2 : 0;
return index2;
},
set(v) {
set(v);
}
});
function set(i) {
const length = list.length;
const index2 = (i % length + length) % length;
const value = list[index2];
state.value = value;
return value;
}
function shift(delta = 1) {
return set(index.value + delta);
}
function next(n = 1) {
return shift(n);
}
function prev(n = 1) {
return shift(-n);
}
return {
state,
index,
next,
prev
};
}
var __defProp$e = Object.defineProperty;
var __defProps$7 = Object.defineProperties;
var __getOwnPropDescs$7 = Object.getOwnPropertyDescriptors;
var __getOwnPropSymbols$g = Object.getOwnPropertySymbols;
var __hasOwnProp$g = Object.prototype.hasOwnProperty;
var __propIsEnum$g = Object.prototype.propertyIsEnumerable;
var __defNormalProp$e = (obj, key, value) => key in obj ? __defProp$e(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues$e = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp$g.call(b, prop))
__defNormalProp$e(a, prop, b[prop]);
if (__getOwnPropSymbols$g)
for (var prop of __getOwnPropSymbols$g(b)) {
if (__propIsEnum$g.call(b, prop))
__defNormalProp$e(a, prop, b[prop]);
}
return a;
};
var __spreadProps$7 = (a, b) => __defProps$7(a, __getOwnPropDescs$7(b));
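/**
 * Boolean dark-mode ref built on useColorMode: writing true/false maps to
 * "dark"/"light", or back to "auto" when the value matches the OS preference.
 *
 * Usage sketch:
 *   const isDark = useDark();
 *   const toggle = () => (isDark.value = !isDark.value);
 */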
function useDark(options = {}) {
const {
valueDark = "dark",
valueLight = "",
window = defaultWindow
} = options;
const mode = useColorMode(__spreadProps$7(__spreadValues$e({}, options), {
onChanged: (mode2, defaultHandler) => {
var _a;
if (options.onChanged)
(_a = options.onChanged) == null ? void 0 : _a.call(options, mode2 === "dark");
else
defaultHandler(mode2);
},
modes: {
dark: valueDark,
light: valueLight
}
}));
const preferredDark = usePreferredDark({ window });
const isDark = vueDemi.computed({
get() {
return mode.value === "dark";
},
set(v) {
if (v === preferredDark.value)
mode.value = "auto";
else
mode.value = v ? "dark" : "light";
}
});
return isDark;
}
const fnClone = (v) => JSON.parse(JSON.stringify(v));
const fnBypass = (v) => v;
const fnSetSource = (source, value) => source.value = value;
function defaultDump(clone) {
return clone ? shared.isFunction(clone) ? clone : fnClone : fnBypass;
}
function defaultParse(clone) {
return clone ? shared.isFunction(clone) ? clone : fnClone : fnBypass;
}
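/**
 * Manual undo/redo history for a ref: snapshots are only taken on commit(),
 * with optional clone/dump/parse hooks and a capacity bound on the undo stack.
 *
 * Usage sketch:
 *   const counter = vueDemi.ref(0);
 *   const { commit, undo, canUndo } = useManualRefHistory(counter);
 *   counter.value++; commit(); undo();
 */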
function useManualRefHistory(source, options = {}) {
const {
clone = false,
dump = defaultDump(clone),
parse = defaultParse(clone),
setSource = fnSetSource
} = options;
function _createHistoryRecord() {
return vueDemi.markRaw({
snapshot: dump(source.value),
timestamp: shared.timestamp()
});
}
const last = vueDemi.ref(_createHistoryRecord());
const undoStack = vueDemi.ref([]);
const redoStack = vueDemi.ref([]);
const _setSource = (record) => {
setSource(source, parse(record.snapshot));
last.value = record;
};
const commit = () => {
undoStack.value.unshift(last.value);
last.value = _createHistoryRecord();
if (options.capacity && undoStack.value.length > options.capacity)
undoStack.value.splice(options.capacity, Infinity);
if (redoStack.value.length)
redoStack.value.splice(0, redoStack.value.length);
};
const clear = () => {
undoStack.value.splice(0, undoStack.value.length);
redoStack.value.splice(0, redoStack.value.length);
};
const undo = () => {
const state = undoStack.value.shift();
if (state) {
redoStack.value.unshift(last.value);
_setSource(state);
}
};
const redo = () => {
const state = redoStack.value.shift();
if (state) {
undoStack.value.unshift(last.value);
_setSource(state);
}
};
const reset = () => {
_setSource(last.value);
};
const history = vueDemi.computed(() => [last.value, ...undoStack.value]);
const canUndo = vueDemi.computed(() => undoStack.value.length > 0);
const canRedo = vueDemi.computed(() => redoStack.value.length > 0);
return {
source,
undoStack,
redoStack,
last,
history,
canUndo,
canRedo,
clear,
commit,
reset,
undo,
redo
};
}
var __defProp$d = Object.defineProperty;
var __defProps$6 = Object.defineProperties;
var __getOwnPropDescs$6 = Object.getOwnPropertyDescriptors;
var __getOwnPropSymbols$f = Object.getOwnPropertySymbols;
var __hasOwnProp$f = Object.prototype.hasOwnProperty;
var __propIsEnum$f = Object.prototype.propertyIsEnumerable;
var __defNormalProp$d = (obj, key, value) => key in obj ? __defProp$d(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues$d = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp$f.call(b, prop))
__defNormalProp$d(a, prop, b[prop]);
if (__getOwnPropSymbols$f)
for (var prop of __getOwnPropSymbols$f(b)) {
if (__propIsEnum$f.call(b, prop))
__defNormalProp$d(a, prop, b[prop]);
}
return a;
};
var __spreadProps$6 = (a, b) => __defProps$6(a, __getOwnPropDescs$6(b));
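/**
 * Automatic undo/redo history for a ref: watches the source and commits on
 * each change, with pause/resume tracking and batch() for grouped updates.
 *
 * Usage sketch:
 *   const text = vueDemi.ref("");
 *   const { undo, redo, history } = useRefHistory(text, { deep: false });
 */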
function useRefHistory(source, options = {}) {
const {
deep = false,
flush = "pre",
eventFilter
} = options;
const {
eventFilter: composedFilter,
pause,
resume: resumeTracking,
isActive: isTracking
} = shared.pausableFilter(eventFilter);
const {
ignoreUpdates,
ignorePrevAsyncUpdates,
stop
} = shared.watchIgnorable(source, commit, { deep, flush, eventFilter: composedFilter });
function setSource(source2, value) {
ignorePrevAsyncUpdates();
ignoreUpdates(() => {
source2.value = value;
});
}
const manualHistory = useManualRefHistory(source, __spreadProps$6(__spreadValues$d({}, options), { clone: options.clone || deep, setSource }));
const { clear, commit: manualCommit } = manualHistory;
function commit() {
ignorePrevAsyncUpdates();
manualCommit();
}
function resume(commitNow) {
resumeTracking();
if (commitNow)
commit();
}
function batch(fn) {
let canceled = false;
const cancel = () => canceled = true;
ignoreUpdates(() => {
fn(cancel);
});
if (!canceled)
commit();
}
function dispose() {
stop();
clear();
}
return __spreadProps$6(__spreadValues$d({}, manualHistory), {
isTracking,
pause,
resume,
commit,
batch,
dispose
});
}
var __defProp$c = Object.defineProperty;
var __defProps$5 = Object.defineProperties;
var __getOwnPropDescs$5 = Object.getOwnPropertyDescriptors;
var __getOwnPropSymbols$e = Object.getOwnPropertySymbols;
var __hasOwnProp$e = Object.prototype.hasOwnProperty;
var __propIsEnum$e = Object.prototype.propertyIsEnumerable;
var __defNormalProp$c = (obj, key, value) => key in obj ? __defProp$c(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues$c = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp$e.call(b, prop))
__defNormalProp$c(a, prop, b[prop]);
if (__getOwnPropSymbols$e)
for (var prop of __getOwnPropSymbols$e(b)) {
if (__propIsEnum$e.call(b, prop))
__defNormalProp$c(a, prop, b[prop]);
}
return a;
};
var __spreadProps$5 = (a, b) => __defProps$5(a, __getOwnPropDescs$5(b));
function useDebouncedRefHistory(source, options = {}) {
const filter = options.debounce ? shared.debounceFilter(options.debounce) : void 0;
const history = useRefHistory(source, __spreadProps$5(__spreadValues$c({}, options), { eventFilter: filter }));
return __spreadValues$c({}, history);
}
function useDeviceMotion(options = {}) {
const {
window = defaultWindow,
eventFilter = shared.bypassFilter
} = options;
const acceleration = vueDemi.ref({ x: null, y: null, z: null });
const rotationRate = vueDemi.ref({ alpha: null, beta: null, gamma: null });
const interval = vueDemi.ref(0);
const accelerationIncludingGravity = vueDemi.ref({
x: null,
y: null,
z: null
});
if (window) {
const onDeviceMotion = shared.createFilterWrapper(eventFilter, (event) => {
acceleration.value = event.acceleration;
accelerationIncludingGravity.value = event.accelerationIncludingGravity;
rotationRate.value = event.rotationRate;
interval.value = event.interval;
});
useEventListener(window, "devicemotion", onDeviceMotion);
}
return {
acceleration,
accelerationIncludingGravity,
rotationRate,
interval
};
}
function useDeviceOrientation(options = {}) {
const { window = defaultWindow } = options;
const isSupported = Boolean(window && "DeviceOrientationEvent" in window);
const isAbsolute = vueDemi.ref(false);
const alpha = vueDemi.ref(null);
const beta = vueDemi.ref(null);
const gamma = vueDemi.ref(null);
if (window && isSupported) {
useEventListener(window, "deviceorientation", (event) => {
isAbsolute.value = event.absolute;
alpha.value = event.alpha;
beta.value = event.beta;
gamma.value = event.gamma;
});
}
return {
isSupported,
isAbsolute,
alpha,
beta,
gamma
};
}
const DEVICE_PIXEL_RATIO_SCALES = [
1,
1.325,
1.4,
1.5,
1.8,
2,
2.4,
2.5,
2.75,
3,
3.5,
4
];
function useDevicePixelRatio({
window = defaultWindow
} = {}) {
if (!window) {
return {
pixelRatio: vueDemi.ref(1)
};
}
const pixelRatio = vueDemi.ref(window.devicePixelRatio);
const handleDevicePixelRatio = () => {
pixelRatio.value = window.devicePixelRatio;
};
useEventListener(window, "resize", handleDevicePixelRatio, { passive: true });
DEVICE_PIXEL_RATIO_SCALES.forEach((dppx) => {
const mqlMin = useMediaQuery(`screen and (min-resolution: ${dppx}dppx)`);
const mqlMax = useMediaQuery(`screen and (max-resolution: ${dppx}dppx)`);
vueDemi.watch([mqlMin, mqlMax], handleDevicePixelRatio);
});
return { pixelRatio };
}
function usePermission(permissionDesc, options = {}) {
const {
controls = false,
navigator = defaultNavigator
} = options;
const isSupported = Boolean(navigator && "permissions" in navigator);
let permissionStatus;
const desc = typeof permissionDesc === "string" ? { name: permissionDesc } : permissionDesc;
const state = vueDemi.ref();
const onChange = () => {
if (permissionStatus)
state.value = permissionStatus.state;
};
const query = shared.createSingletonPromise(async () => {
if (!isSupported)
return;
if (!permissionStatus) {
try {
permissionStatus = await navigator.permissions.query(desc);
useEventListener(permissionStatus, "change", onChange);
onChange();
} catch (e) {
state.value = "prompt";
}
}
return permissionStatus;
});
query();
if (controls) {
return {
state,
isSupported,
query
};
} else {
return state;
}
}
function useDevicesList(options = {}) {
const {
navigator = defaultNavigator,
requestPermissions = false,
constraints = { audio: true, video: true },
onUpdated
} = options;
const devices = vueDemi.ref([]);
const videoInputs = vueDemi.computed(() => devices.value.filter((i) => i.kind === "videoinput"));
const audioInputs = vueDemi.computed(() => devices.value.filter((i) => i.kind === "audioinput"));
const audioOutputs = vueDemi.computed(() => devices.value.filter((i) => i.kind === "audiooutput"));
let isSupported = false;
const permissionGranted = vueDemi.ref(false);
async function update() {
if (!isSupported)
return;
devices.value = await navigator.mediaDevices.enumerateDevices();
onUpdated == null ? void 0 : onUpdated(devices.value);
}
async function ensurePermissions() {
if (!isSupported)
return false;
if (permissionGranted.value)
return true;
const { state, query } = usePermission("camera", { controls: true });
await query();
if (state.value !== "granted") {
const stream = await navigator.mediaDevices.getUserMedia(constraints);
stream.getTracks().forEach((t) => t.stop());
update();
permissionGranted.value = true;
} else {
permissionGranted.value = true;
}
return permissionGranted.value;
}
if (navigator) {
isSupported = Boolean(navigator.mediaDevices && navigator.mediaDevices.enumerateDevices);
if (isSupported) {
if (requestPermissions)
ensurePermissions();
useEventListener(navigator.mediaDevices, "devicechange", update);
update();
}
}
return {
devices,
ensurePermissions,
permissionGranted,
videoInputs,
audioInputs,
audioOutputs,
isSupported
};
}
function useDisplayMedia(options = {}) {
var _a, _b;
const enabled = vueDemi.ref((_a = options.enabled) != null ? _a : false);
const video = options.video;
const audio = options.audio;
const { navigator = defaultNavigator } = options;
const isSupported = Boolean((_b = navigator == null ? void 0 : navigator.mediaDevices) == null ? void 0 : _b.getDisplayMedia);
const constraint = { audio, video };
const stream = vueDemi.shallowRef();
async function _start() {
if (!isSupported || stream.value)
return;
stream.value = await navigator.mediaDevices.getDisplayMedia(constraint);
return stream.value;
}
async function _stop() {
var _a2;
(_a2 = stream.value) == null ? void 0 : _a2.getTracks().forEach((t) => t.stop());
stream.value = void 0;
}
function stop() {
_stop();
enabled.value = false;
}
async function start() {
await _start();
if (stream.value)
enabled.value = true;
return stream.value;
}
vueDemi.watch(enabled, (v) => {
if (v)
_start();
else
_stop();
}, { immediate: true });
return {
isSupported,
stream,
start,
stop,
enabled
};
}
function useDocumentVisibility({ document = defaultDocument } = {}) {
if (!document)
return vueDemi.ref("visible");
const visibility = vueDemi.ref(document.visibilityState);
useEventListener(document, "visibilitychange", () => {
visibility.value = document.visibilityState;
});
return visibility;
}
var __defProp$b = Object.defineProperty;
var __defProps$4 = Object.defineProperties;
var __getOwnPropDescs$4 = Object.getOwnPropertyDescriptors;
var __getOwnPropSymbols$d = Object.getOwnPropertySymbols;
var __hasOwnProp$d = Object.prototype.hasOwnProperty;
var __propIsEnum$d = Object.prototype.propertyIsEnumerable;
var __defNormalProp$b = (obj, key, value) => key in obj ? __defProp$b(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues$b = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp$d.call(b, prop))
__defNormalProp$b(a, prop, b[prop]);
if (__getOwnPropSymbols$d)
for (var prop of __getOwnPropSymbols$d(b)) {
if (__propIsEnum$d.call(b, prop))
__defNormalProp$b(a, prop, b[prop]);
}
return a;
};
var __spreadProps$4 = (a, b) => __defProps$4(a, __getOwnPropDescs$4(b));
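/**
 * Makes an element draggable via pointer events, tracking its x/y position
 * and exposing a ready-made `style` string for absolute positioning.
 *
 * Usage sketch (assumes `el` is a template ref to the target element):
 *   const { x, y, style, isDragging } = useDraggable(el, { initialValue: { x: 40, y: 40 } });
 */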
function useDraggable(target, options = {}) {
var _a, _b;
const draggingElement = (_a = options.draggingElement) != null ? _a : defaultWindow;
const position = vueDemi.ref((_b = options.initialValue) != null ? _b : { x: 0, y: 0 });
const pressedDelta = vueDemi.ref();
const filterEvent = (e) => {
if (options.pointerTypes)
return options.pointerTypes.includes(e.pointerType);
return true;
};
const handleEvent = (e) => {
if (vueDemi.unref(options.preventDefault))
e.preventDefault();
if (vueDemi.unref(options.stopPropagation))
e.stopPropagation();
};
const start = (e) => {
var _a2;
if (!filterEvent(e))
return;
if (vueDemi.unref(options.exact) && e.target !== vueDemi.unref(target))
return;
const rect = vueDemi.unref(target).getBoundingClientRect();
const pos = {
x: e.pageX - rect.left,
y: e.pageY - rect.top
};
if (((_a2 = options.onStart) == null ? void 0 : _a2.call(options, pos, e)) === false)
return;
pressedDelta.value = pos;
handleEvent(e);
};
const move = (e) => {
var _a2;
if (!filterEvent(e))
return;
if (!pressedDelta.value)
return;
position.value = {
x: e.pageX - pressedDelta.value.x,
y: e.pageY - pressedDelta.value.y
};
(_a2 = options.onMove) == null ? void 0 : _a2.call(options, position.value, e);
handleEvent(e);
};
const end = (e) => {
var _a2;
if (!filterEvent(e))
return;
if (!pressedDelta.value)
return;
pressedDelta.value = void 0;
(_a2 = options.onEnd) == null ? void 0 : _a2.call(options, position.value, e);
handleEvent(e);
};
if (shared.isClient) {
useEventListener(target, "pointerdown", start, true);
useEventListener(draggingElement, "pointermove", move, true);
useEventListener(draggingElement, "pointerup", end, true);
}
return __spreadProps$4(__spreadValues$b({}, shared.toRefs(position)), {
position,
isDragging: vueDemi.computed(() => !!pressedDelta.value),
style: vueDemi.computed(() => `left:${position.value.x}px;top:${position.value.y}px;`)
});
}
var __getOwnPropSymbols$c = Object.getOwnPropertySymbols;
var __hasOwnProp$c = Object.prototype.hasOwnProperty;
var __propIsEnum$c = Object.prototype.propertyIsEnumerable;
var __objRest$2 = (source, exclude) => {
var target = {};
for (var prop in source)
if (__hasOwnProp$c.call(source, prop) && exclude.indexOf(prop) < 0)
target[prop] = source[prop];
if (source != null && __getOwnPropSymbols$c)
for (var prop of __getOwnPropSymbols$c(source)) {
if (exclude.indexOf(prop) < 0 && __propIsEnum$c.call(source, prop))
target[prop] = source[prop];
}
return target;
};
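/**
 * ResizeObserver wrapper: re-observes when the target element changes and
 * disconnects on scope dispose.
 *
 * Usage sketch (assumes `el` is a template ref):
 *   useResizeObserver(el, ([entry]) => console.log(entry.contentRect));
 */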
function useResizeObserver(target, callback, options = {}) {
const _a = options, { window = defaultWindow } = _a, observerOptions = __objRest$2(_a, ["window"]);
let observer;
const isSupported = window && "ResizeObserver" in window;
const cleanup = () => {
if (observer) {
observer.disconnect();
observer = void 0;
}
};
const stopWatch = vueDemi.watch(() => unrefElement(target), (el) => {
cleanup();
if (isSupported && window && el) {
observer = new ResizeObserver(callback);
observer.observe(el, observerOptions);
}
}, { immediate: true, flush: "post" });
const stop = () => {
cleanup();
stopWatch();
};
shared.tryOnScopeDispose(stop);
return {
isSupported,
stop
};
}
function useElementBounding(target) {
const height = vueDemi.ref(0);
const bottom = vueDemi.ref(0);
const left = vueDemi.ref(0);
const right = vueDemi.ref(0);
const top = vueDemi.ref(0);
const width = vueDemi.ref(0);
const x = vueDemi.ref(0);
const y = vueDemi.ref(0);
function update() {
const el = unrefElement(target);
if (!el) {
height.value = 0;
bottom.value = 0;
left.value = 0;
right.value = 0;
top.value = 0;
width.value = 0;
x.value = 0;
y.value = 0;
return;
}
const rect = el.getBoundingClientRect();
height.value = rect.height;
bottom.value = rect.bottom;
left.value = rect.left;
right.value = rect.right;
top.value = rect.top;
width.value = rect.width;
x.value = rect.x;
y.value = rect.y;
}
useEventListener("scroll", update, true);
useResizeObserver(target, update);
vueDemi.watch(() => unrefElement(target), (ele) => !ele && update());
return {
height,
bottom,
left,
right,
top,
width,
x,
y,
update
};
}
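/**
 * Calls `fn` on every requestAnimationFrame tick, with pause/resume controls;
 * starts immediately unless `immediate: false` is passed.
 *
 * Usage sketch:
 *   const { pause, resume } = useRafFn(() => console.log("frame"));
 */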
function useRafFn(fn, options = {}) {
const {
immediate = true,
window = defaultWindow
} = options;
const isActive = vueDemi.ref(false);
function loop() {
if (!isActive.value || !window)
return;
fn();
window.requestAnimationFrame(loop);
}
function resume() {
if (!isActive.value && window) {
isActive.value = true;
loop();
}
}
function pause() {
isActive.value = false;
}
if (immediate)
resume();
shared.tryOnScopeDispose(pause);
return {
isActive,
pause,
resume
};
}
var __defProp$a = Object.defineProperty;
var __getOwnPropSymbols$b = Object.getOwnPropertySymbols;
var __hasOwnProp$b = Object.prototype.hasOwnProperty;
var __propIsEnum$b = Object.prototype.propertyIsEnumerable;
var __defNormalProp$a = (obj, key, value) => key in obj ? __defProp$a(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues$a = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp$b.call(b, prop))
__defNormalProp$a(a, prop, b[prop]);
if (__getOwnPropSymbols$b)
for (var prop of __getOwnPropSymbols$b(b)) {
if (__propIsEnum$b.call(b, prop))
__defNormalProp$a(a, prop, b[prop]);
}
return a;
};
function useElementByPoint(options) {
const element = vueDemi.ref(null);
const { x, y } = options;
const controls = useRafFn(() => {
element.value = document.elementFromPoint(vueDemi.unref(x), vueDemi.unref(y));
});
return __spreadValues$a({
element
}, controls);
}
function useElementHover(el) {
const isHovered = vueDemi.ref(false);
useEventListener(el, "mouseenter", () => isHovered.value = true);
useEventListener(el, "mouseleave", () => isHovered.value = false);
return isHovered;
}
function useElementSize(target, initialSize = { width: 0, height: 0 }, options = {}) {
const width = vueDemi.ref(initialSize.width);
const height = vueDemi.ref(initialSize.height);
useResizeObserver(target, ([entry]) => {
width.value = entry.contentRect.width;
height.value = entry.contentRect.height;
}, options);
vueDemi.watch(() => unrefElement(target), (ele) => {
width.value = ele ? initialSize.width : 0;
height.value = ele ? initialSize.height : 0;
});
return {
width,
height
};
}
function useElementVisibility(element, { window = defaultWindow, scrollTarget } = {}) {
const elementIsVisible = vueDemi.ref(false);
const testBounding = () => {
if (!window)
return;
const document = window.document;
if (!vueDemi.unref(element)) {
elementIsVisible.value = false;
} else {
const rect = vueDemi.unref(element).getBoundingClientRect();
elementIsVisible.value = rect.top <= (window.innerHeight || document.documentElement.clientHeight) && rect.left <= (window.innerWidth || document.documentElement.clientWidth) && rect.bottom >= 0 && rect.right >= 0;
}
};
shared.tryOnMounted(testBounding);
if (window)
shared.tryOnMounted(() => useEventListener(vueDemi.unref(scrollTarget) || window, "scroll", testBounding, { capture: false, passive: true }));
return elementIsVisible;
}
const events = /* @__PURE__ */ new Map();
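/**
 * Minimal keyed event bus: listeners registered via on()/once() are stored in
 * the module-level `events` Map and auto-removed when the current effect
 * scope disposes.
 *
 * Usage sketch (the key is illustrative):
 *   const bus = useEventBus("news");
 *   bus.on((event, payload) => console.log(event, payload));
 *   bus.emit("update", { id: 1 });
 */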
function useEventBus(key) {
const scope = vueDemi.getCurrentScope();
function on(listener) {
const listeners = events.get(key) || [];
listeners.push(listener);
events.set(key, listeners);
const _off = () => off(listener);
scope == null ? void 0 : scope.cleanups.push(_off);
return _off;
}
function once(listener) {
function _listener(...args) {
off(_listener);
listener(...args);
}
return on(_listener);
}
function off(listener) {
const listeners = events.get(key);
if (!listeners)
return;
const index = listeners.indexOf(listener);
if (index > -1)
listeners.splice(index, 1);
if (!listeners.length)
events.delete(key);
}
function reset() {
events.delete(key);
}
function emit(event, payload) {
var _a;
(_a = events.get(key)) == null ? void 0 : _a.forEach((v) => v(event, payload));
}
return { on, once, off, emit, reset };
}
function useEventSource(url, events = [], options = {}) {
const event = vueDemi.ref(null);
const data = vueDemi.ref(null);
const status = vueDemi.ref("CONNECTING");
const eventSource = vueDemi.ref(null);
const error = vueDemi.ref(null);
const {
withCredentials = false
} = options;
const close = () => {
if (eventSource.value) {
eventSource.value.close();
eventSource.value = null;
status.value = "CLOSED";
}
};
const es = new EventSource(url, { withCredentials });
eventSource.value = es;
es.onopen = () => {
status.value = "OPEN";
error.value = null;
};
es.onerror = (e) => {
status.value = "CLOSED";
error.value = e;
};
es.onmessage = (e) => {
event.value = null;
data.value = e.data;
};
for (const event_name of events) {
useEventListener(es, event_name, (e) => {
event.value = event_name;
data.value = e.data || null;
});
}
shared.tryOnScopeDispose(() => {
close();
});
return {
eventSource,
event,
data,
status,
error,
close
};
}
function useEyeDropper(options = {}) {
const { initialValue = "" } = options;
const isSupported = Boolean(typeof window !== "undefined" && "EyeDropper" in window);
const sRGBHex = vueDemi.ref(initialValue);
async function open(openOptions) {
if (!isSupported)
return;
const eyeDropper = new window.EyeDropper();
const result = await eyeDropper.open(openOptions);
sRGBHex.value = result.sRGBHex;
return result;
}
return { isSupported, sRGBHex, open };
}
function useFavicon(newIcon = null, options = {}) {
const {
baseUrl = "",
rel = "icon",
document = defaultDocument
} = options;
const favicon = vueDemi.isRef(newIcon) ? newIcon : vueDemi.ref(newIcon);
const applyIcon = (icon) => {
document == null ? void 0 : document.head.querySelectorAll(`link[rel*="${rel}"]`).forEach((el) => el.href = `${baseUrl}${icon}`);
};
vueDemi.watch(favicon, (i, o) => {
if (shared.isString(i) && i !== o)
applyIcon(i);
}, { immediate: true });
return favicon;
}
var __defProp$9 = Object.defineProperty;
var __defProps$3 = Object.defineProperties;
var __getOwnPropDescs$3 = Object.getOwnPropertyDescriptors;
var __getOwnPropSymbols$a = Object.getOwnPropertySymbols;
var __hasOwnProp$a = Object.prototype.hasOwnProperty;
var __propIsEnum$a = Object.prototype.propertyIsEnumerable;
var __defNormalProp$9 = (obj, key, value) => key in obj ? __defProp$9(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues$9 = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp$a.call(b, prop))
__defNormalProp$9(a, prop, b[prop]);
if (__getOwnPropSymbols$a)
for (var prop of __getOwnPropSymbols$a(b)) {
if (__propIsEnum$a.call(b, prop))
__defNormalProp$9(a, prop, b[prop]);
}
return a;
};
var __spreadProps$3 = (a, b) => __defProps$3(a, __getOwnPropDescs$3(b));
const payloadMapping = {
json: "application/json",
text: "text/plain",
formData: "multipart/form-data"
};
function isFetchOptions(obj) {
return shared.containsProp(obj, "immediate", "refetch", "initialData", "timeout", "beforeFetch", "afterFetch", "onFetchError");
}
function headersToObject(headers) {
if (headers instanceof Headers)
return Object.fromEntries([...headers.entries()]);
return headers;
}
function createFetch(config = {}) {
const _options = config.options || {};
const _fetchOptions = config.fetchOptions || {};
function useFactoryFetch(url, ...args) {
const computedUrl = vueDemi.computed(() => config.baseUrl ? joinPaths(vueDemi.unref(config.baseUrl), vueDemi.unref(url)) : vueDemi.unref(url));
let options = _options;
let fetchOptions = _fetchOptions;
if (args.length > 0) {
if (isFetchOptions(args[0])) {
options = __spreadValues$9(__spreadValues$9({}, options), args[0]);
} else {
fetchOptions = __spreadProps$3(__spreadValues$9(__spreadValues$9({}, fetchOptions), args[0]), {
headers: __spreadValues$9(__spreadValues$9({}, headersToObject(fetchOptions.headers) || {}), headersToObject(args[0].headers) || {})
});
}
}
if (args.length > 1 && isFetchOptions(args[1]))
options = __spreadValues$9(__spreadValues$9({}, options), args[1]);
return useFetch(computedUrl, fetchOptions, options);
}
return useFactoryFetch;
}
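/**
 * Reactive fetch wrapper: tracks status/data/error refs, supports abort with
 * a timeout, refetch-on-change, interceptors (beforeFetch/afterFetch/onFetchError),
 * and chainable method/type helpers (get/post/..., json/text/...).
 *
 * Usage sketch (the URL is illustrative):
 *   const { data, error, isFetching } = useFetch("https://example.com/api").get().json();
 */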
function useFetch(url, ...args) {
var _a;
const supportsAbort = typeof AbortController === "function";
let fetchOptions = {};
let options = { immediate: true, refetch: false, timeout: 0 };
const config = {
method: "GET",
type: "text",
payload: void 0
};
if (args.length > 0) {
if (isFetchOptions(args[0]))
options = __spreadValues$9(__spreadValues$9({}, options), args[0]);
else
fetchOptions = args[0];
}
if (args.length > 1) {
if (isFetchOptions(args[1]))
options = __spreadValues$9(__spreadValues$9({}, options), args[1]);
}
const {
fetch = (_a = defaultWindow) == null ? void 0 : _a.fetch,
initialData,
timeout
} = options;
const responseEvent = shared.createEventHook();
const errorEvent = shared.createEventHook();
const finallyEvent = shared.createEventHook();
const isFinished = vueDemi.ref(false);
const isFetching = vueDemi.ref(false);
const aborted = vueDemi.ref(false);
const statusCode = vueDemi.ref(null);
const response = vueDemi.shallowRef(null);
const error = vueDemi.shallowRef(null);
const data = vueDemi.shallowRef(initialData);
const canAbort = vueDemi.computed(() => supportsAbort && isFetching.value);
let controller;
let timer;
const abort = () => {
if (supportsAbort && controller)
controller.abort();
};
const loading = (isLoading) => {
isFetching.value = isLoading;
isFinished.value = !isLoading;
};
if (timeout)
timer = shared.useTimeoutFn(abort, timeout, { immediate: false });
const execute = async (throwOnFailed = false) => {
var _a2;
loading(true);
error.value = null;
statusCode.value = null;
aborted.value = false;
controller = void 0;
if (supportsAbort) {
controller = new AbortController();
controller.signal.onabort = () => aborted.value = true;
fetchOptions = __spreadProps$3(__spreadValues$9({}, fetchOptions), {
signal: controller.signal
});
}
const defaultFetchOptions = {
method: config.method,
headers: {}
};
if (config.payload) {
const headers = headersToObject(defaultFetchOptions.headers);
if (config.payloadType)
headers["Content-Type"] = (_a2 = payloadMapping[config.payloadType]) != null ? _a2 : config.payloadType;
defaultFetchOptions.body = config.payloadType === "json" ? JSON.stringify(vueDemi.unref(config.payload)) : vueDemi.unref(config.payload);
}
let isCanceled = false;
const context = { url: vueDemi.unref(url), options: fetchOptions, cancel: () => {
isCanceled = true;
} };
if (options.beforeFetch)
Object.assign(context, await options.beforeFetch(context));
if (isCanceled || !fetch) {
loading(false);
return Promise.resolve(null);
}
let responseData = null;
if (timer)
timer.start();
return new Promise((resolve, reject) => {
var _a3;
fetch(context.url, __spreadProps$3(__spreadValues$9(__spreadValues$9({}, defaultFetchOptions), context.options), {
headers: __spreadValues$9(__spreadValues$9({}, headersToObject(defaultFetchOptions.headers)), headersToObject((_a3 = context.options) == null ? void 0 : _a3.headers))
})).then(async (fetchResponse) => {
response.value = fetchResponse;
statusCode.value = fetchResponse.status;
responseData = await fetchResponse[config.type]();
if (options.afterFetch && statusCode.value >= 200 && statusCode.value < 300)
({ data: responseData } = await options.afterFetch({ data: responseData, response: fetchResponse }));
data.value = responseData;
if (!fetchResponse.ok)
throw new Error(fetchResponse.statusText);
responseEvent.trigger(fetchResponse);
return resolve(fetchResponse);
}).catch(async (fetchError) => {
let errorData = fetchError.message || fetchError.name;
if (options.onFetchError)
({ data: responseData, error: errorData } = await options.onFetchError({ data: responseData, error: fetchError }));
data.value = responseData;
error.value = errorData;
errorEvent.trigger(fetchError);
if (throwOnFailed)
return reject(fetchError);
return resolve(null);
}).finally(() => {
loading(false);
if (timer)
timer.stop();
finallyEvent.trigger(null);
});
});
};
vueDemi.watch(() => [
vueDemi.unref(url),
vueDemi.unref(options.refetch)
], () => vueDemi.unref(options.refetch) && execute(), { deep: true });
const shell = {
isFinished,
statusCode,
response,
error,
data,
isFetching,
canAbort,
aborted,
abort,
execute,
onFetchResponse: responseEvent.on,
onFetchError: errorEvent.on,
onFetchFinally: finallyEvent.on,
get: setMethod("GET"),
put: setMethod("PUT"),
post: setMethod("POST"),
delete: setMethod("DELETE"),
patch: setMethod("PATCH"),
head: setMethod("HEAD"),
options: setMethod("OPTIONS"),
json: setType("json"),
text: setType("text"),
blob: setType("blob"),
arrayBuffer: setType("arrayBuffer"),
formData: setType("formData")
};
function setMethod(method) {
return (payload, payloadType) => {
if (!isFetching.value) {
config.method = method;
config.payload = payload;
config.payloadType = payloadType;
if (vueDemi.isRef(config.payload)) {
vueDemi.watch(() => [
vueDemi.unref(config.payload),
vueDemi.unref(options.refetch)
], () => vueDemi.unref(options.refetch) && execute(), { deep: true });
}
if (!payloadType && vueDemi.unref(payload) && Object.getPrototypeOf(vueDemi.unref(payload)) === Object.prototype)
config.payloadType = "json";
return shell;
}
return void 0;
};
}
function waitUntilFinished() {
return new Promise((resolve, reject) => {
shared.until(isFinished).toBe(true).then(() => resolve(shell)).catch((error2) => reject(error2));
});
}
function setType(type) {
return () => {
if (!isFetching.value) {
config.type = type;
return __spreadProps$3(__spreadValues$9({}, shell), {
then(onFulfilled, onRejected) {
return waitUntilFinished().then(onFulfilled, onRejected);
}
});
}
return void 0;
};
}
if (options.immediate)
setTimeout(execute, 0);
return __spreadProps$3(__spreadValues$9({}, shell), {
then(onFulfilled, onRejected) {
return waitUntilFinished().then(onFulfilled, onRejected);
}
});
}
function joinPaths(start, end) {
if (!start.endsWith("/") && !end.startsWith("/"))
return `${start}/${end}`;
return `${start}${end}`;
}
var __defProp$8 = Object.defineProperty;
var __getOwnPropSymbols$9 = Object.getOwnPropertySymbols;
var __hasOwnProp$9 = Object.prototype.hasOwnProperty;
var __propIsEnum$9 = Object.prototype.propertyIsEnumerable;
var __defNormalProp$8 = (obj, key, value) => key in obj ? __defProp$8(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues$8 = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp$9.call(b, prop))
__defNormalProp$8(a, prop, b[prop]);
if (__getOwnPropSymbols$9)
for (var prop of __getOwnPropSymbols$9(b)) {
if (__propIsEnum$9.call(b, prop))
__defNormalProp$8(a, prop, b[prop]);
}
return a;
};
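/**
 * File System Access API wrapper: open/create/save file handles while keeping
 * reactive refs for the file, its metadata, and its contents
 * (Text/ArrayBuffer/Blob depending on `dataType`).
 *
 * Usage sketch (Chromium-only API; assumes a user gesture triggers open()):
 *   const { isSupported, data, open, save } = useFileSystemAccess({ dataType: "Text" });
 */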
function useFileSystemAccess(options = {}) {
const {
window: _window = defaultWindow,
dataType = "Text"
} = vueDemi.unref(options);
const window = _window;
const isSupported = Boolean(window && "showSaveFilePicker" in window && "showOpenFilePicker" in window);
const fileHandle = vueDemi.ref();
const data = vueDemi.ref();
const file = vueDemi.ref();
const fileName = vueDemi.computed(() => {
var _a, _b;
return (_b = (_a = file.value) == null ? void 0 : _a.name) != null ? _b : "";
});
const fileMIME = vueDemi.computed(() => {
var _a, _b;
return (_b = (_a = file.value) == null ? void 0 : _a.type) != null ? _b : "";
});
const fileSize = vueDemi.computed(() => {
var _a, _b;
return (_b = (_a = file.value) == null ? void 0 : _a.size) != null ? _b : 0;
});
const fileLastModified = vueDemi.computed(() => {
var _a, _b;
return (_b = (_a = file.value) == null ? void 0 : _a.lastModified) != null ? _b : 0;
});
async function open(_options = {}) {
if (!isSupported)
return;
const [handle] = await window.showOpenFilePicker(__spreadValues$8(__spreadValues$8({}, vueDemi.unref(options)), _options));
fileHandle.value = handle;
await updateFile();
await updateData();
}
async function create(_options = {}) {
if (!isSupported)
return;
fileHandle.value = await window.showSaveFilePicker(__spreadValues$8(__spreadValues$8({}, vueDemi.unref(options)), _options));
data.value = void 0;
await updateFile();
await updateData();
}
async function save(_options = {}) {
if (!isSupported)
return;
if (!fileHandle.value)
return saveAs(_options);
if (data.value) {
const writableStream = await fileHandle.value.createWritable();
await writableStream.write(data.value);
await writableStream.close();
}
await updateFile();
}
async function saveAs(_options = {}) {
if (!isSupported)
return;
fileHandle.value = await window.showSaveFilePicker(__spreadValues$8(__spreadValues$8({}, vueDemi.unref(options)), _options));
if (data.value) {
const writableStream = await fileHandle.value.createWritable();
await writableStream.write(data.value);
await writableStream.close();
}
await updateFile();
}
async function updateFile() {
var _a;
file.value = await ((_a = fileHandle.value) == null ? void 0 : _a.getFile());
}
async function updateData() {
var _a, _b;
if (vueDemi.unref(dataType) === "Text")
data.value = await ((_a = file.value) == null ? void 0 : _a.text());
if (vueDemi.unref(dataType) === "ArrayBuffer")
data.value = await ((_b = file.value) == null ? void 0 : _b.arrayBuffer());
if (vueDemi.unref(dataType) === "Blob")
data.value = file.value;
}
vueDemi.watch(() => vueDemi.unref(dataType), updateData);
return {
isSupported,
data,
file,
fileName,
fileMIME,
fileSize,
fileLastModified,
open,
create,
save,
saveAs,
updateData
};
}
function useFocus(target, options = {}) {
const { initialValue = false } = options;
const activeElement = useActiveElement(options);
const targetElement = vueDemi.computed(() => unrefElement(target));
const focused = vueDemi.computed({
get() {
return activeElement.value === targetElement.value;
},
set(value) {
var _a, _b;
if (!value && focused.value)
(_a = targetElement.value) == null ? void 0 : _a.blur();
if (value && !focused.value)
(_b = targetElement.value) == null ? void 0 : _b.focus();
}
});
vueDemi.watch(targetElement, () => {
focused.value = initialValue;
}, { immediate: true, flush: "post" });
return { focused };
}
function useFocusWithin(target, options = {}) {
const activeElement = useActiveElement(options);
const targetElement = vueDemi.computed(() => unrefElement(target));
const focused = vueDemi.computed(() => targetElement.value && activeElement.value ? targetElement.value.contains(activeElement.value) : false);
return { focused };
}
function useFps(options) {
var _a;
const fps = vueDemi.ref(0);
if (typeof performance === "undefined")
return fps;
const every = (_a = options == null ? void 0 : options.every) != null ? _a : 10;
let last = performance.now();
let ticks = 0;
useRafFn(() => {
ticks += 1;
if (ticks >= every) {
const now = performance.now();
const diff = now - last;
fps.value = Math.round(1e3 / (diff / ticks));
last = now;
ticks = 0;
}
});
return fps;
}
const functionsMap = [
[
"requestFullscreen",
"exitFullscreen",
"fullscreenElement",
"fullscreenEnabled",
"fullscreenchange",
"fullscreenerror"
],
[
"webkitRequestFullscreen",
"webkitExitFullscreen",
"webkitFullscreenElement",
"webkitFullscreenEnabled",
"webkitfullscreenchange",
"webkitfullscreenerror"
],
[
"webkitRequestFullScreen",
"webkitCancelFullScreen",
"webkitCurrentFullScreenElement",
"webkitCancelFullScreen",
"webkitfullscreenchange",
"webkitfullscreenerror"
],
[
"mozRequestFullScreen",
"mozCancelFullScreen",
"mozFullScreenElement",
"mozFullScreenEnabled",
"mozfullscreenchange",
"mozfullscreenerror"
],
[
"msRequestFullscreen",
"msExitFullscreen",
"msFullscreenElement",
"msFullscreenEnabled",
"MSFullscreenChange",
"MSFullscreenError"
]
];
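/**
 * Fullscreen API wrapper: picks the first vendor-prefixed method set the
 * document supports and exposes enter/exit/toggle plus an isFullscreen ref.
 *
 * Usage sketch (assumes `el` is a template ref; defaults to <html>):
 *   const { isFullscreen, toggle } = useFullscreen(el);
 */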
function useFullscreen(target, options = {}) {
const { document = defaultDocument, autoExit = false } = options;
const targetRef = target || (document == null ? void 0 : document.querySelector("html"));
const isFullscreen = vueDemi.ref(false);
let isSupported = false;
let map = functionsMap[0];
if (!document) {
isSupported = false;
} else {
for (const m of functionsMap) {
if (m[1] in document) {
map = m;
isSupported = true;
break;
}
}
}
const [REQUEST, EXIT, ELEMENT, , EVENT] = map;
async function exit() {
if (!isSupported)
return;
if (document == null ? void 0 : document[ELEMENT])
await document[EXIT]();
isFullscreen.value = false;
}
async function enter() {
if (!isSupported)
return;
await exit();
const target2 = unrefElement(targetRef);
if (target2) {
await target2[REQUEST]();
isFullscreen.value = true;
}
}
async function toggle() {
if (isFullscreen.value)
await exit();
else
await enter();
}
if (document) {
useEventListener(document, EVENT, () => {
isFullscreen.value = !!(document == null ? void 0 : document[ELEMENT]);
}, false);
}
if (autoExit)
shared.tryOnScopeDispose(exit);
return {
isSupported,
isFullscreen,
enter,
exit,
toggle
};
}
function mapGamepadToXbox360Controller(gamepad) {
return vueDemi.computed(() => {
if (gamepad.value) {
return {
buttons: {
a: gamepad.value.buttons[0],
b: gamepad.value.buttons[1],
x: gamepad.value.buttons[2],
y: gamepad.value.buttons[3]
},
bumper: {
left: gamepad.value.buttons[4],
right: gamepad.value.buttons[5]
},
triggers: {
left: gamepad.value.buttons[6],
right: gamepad.value.buttons[7]
},
stick: {
left: {
horizontal: gamepad.value.axes[0],
vertical: gamepad.value.axes[1],
button: gamepad.value.buttons[10]
},
right: {
horizontal: gamepad.value.axes[2],
vertical: gamepad.value.axes[3],
button: gamepad.value.buttons[11]
}
},
dpad: {
up: gamepad.value.buttons[12],
down: gamepad.value.buttons[13],
left: gamepad.value.buttons[14],
right: gamepad.value.buttons[15]
},
back: gamepad.value.buttons[8],
start: gamepad.value.buttons[9]
};
}
return null;
});
}
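/**
 * Gamepad API wrapper: mirrors connected gamepads into a reactive array,
 * polling their state on an animation-frame loop that starts paused.
 *
 * Usage sketch:
 *   const { gamepads, onConnected } = useGamepad();
 *   onConnected((index) => console.log("connected", index));
 */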
function useGamepad(options = {}) {
const {
navigator = defaultNavigator
} = options;
const isSupported = navigator && "getGamepads" in navigator;
const gamepads = vueDemi.ref([]);
const onConnectedHook = shared.createEventHook();
const onDisconnectedHook = shared.createEventHook();
const stateFromGamepad = (gamepad) => {
const hapticActuators = [];
const vibrationActuator = "vibrationActuator" in gamepad ? gamepad.vibrationActuator : null;
if (vibrationActuator)
hapticActuators.push(vibrationActuator);
if (gamepad.hapticActuators)
hapticActuators.push(...gamepad.hapticActuators);
return {
id: gamepad.id,
hapticActuators,
index: gamepad.index,
mapping: gamepad.mapping,
connected: gamepad.connected,
timestamp: gamepad.timestamp,
axes: gamepad.axes.map((axes) => axes),
buttons: gamepad.buttons.map((button) => ({ pressed: button.pressed, touched: button.touched, value: button.value }))
};
};
const updateGamepadState = () => {
const _gamepads = (navigator == null ? void 0 : navigator.getGamepads()) || [];
for (let i = 0; i < _gamepads.length; ++i) {
const gamepad = _gamepads[i];
if (gamepad) {
const index = gamepads.value.findIndex(({ index: index2 }) => index2 === gamepad.index);
if (index > -1)
gamepads.value[index] = stateFromGamepad(gamepad);
}
}
};
  // useRafFn is defined in this module; there is no `core` namespace in this bundle
  const { isActive, pause, resume } = useRafFn(updateGamepadState);
const onGamepadConnected = (gamepad) => {
if (!gamepads.value.some(({ index }) => index === gamepad.index)) {
gamepads.value.push(stateFromGamepad(gamepad));
onConnectedHook.trigger(gamepad.index);
}
resume();
};
const onGamepadDisconnected = (gamepad) => {
gamepads.value = gamepads.value.filter((x) => x.index !== gamepad.index);
onDisconnectedHook.trigger(gamepad.index);
};
core.useEventListener("gamepadconnected", (e) => onGamepadConnected(e.gamepad));
core.useEventListener("gamepaddisconnected", (e) => onGamepadDisconnected(e.gamepad));
shared.tryOnMounted(() => {
const _gamepads = (navigator == null ? void 0 : navigator.getGamepads()) || [];
if (_gamepads) {
for (let i = 0; i < _gamepads.length; ++i) {
const gamepad = _gamepads[i];
if (gamepad)
onGamepadConnected(gamepad);
}
}
});
pause();
return {
isSupported,
onConnected: onConnectedHook.on,
onDisconnected: onDisconnectedHook.on,
gamepads,
pause,
resume,
isActive
};
}
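/**
 * Reactive geolocation via navigator.geolocation.watchPosition; the watcher
 * is cleared on scope dispose.
 *
 * Usage sketch:
 *   const { coords, locatedAt, error } = useGeolocation({ enableHighAccuracy: true });
 */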
function useGeolocation(options = {}) {
const {
enableHighAccuracy = true,
maximumAge = 3e4,
timeout = 27e3,
navigator = defaultNavigator
} = options;
const isSupported = navigator && "geolocation" in navigator;
const locatedAt = vueDemi.ref(null);
const error = vueDemi.ref(null);
const coords = vueDemi.ref({
accuracy: 0,
latitude: Infinity,
longitude: Infinity,
altitude: null,
altitudeAccuracy: null,
heading: null,
speed: null
});
function updatePosition(position) {
locatedAt.value = position.timestamp;
coords.value = position.coords;
error.value = null;
}
let watcher;
if (isSupported) {
watcher = navigator.geolocation.watchPosition(updatePosition, (err) => error.value = err, {
enableHighAccuracy,
maximumAge,
timeout
});
}
shared.tryOnScopeDispose(() => {
if (watcher && navigator)
navigator.geolocation.clearWatch(watcher);
});
return {
isSupported,
coords,
locatedAt,
error
};
}
const defaultEvents$1 = ["mousemove", "mousedown", "resize", "keydown", "touchstart", "wheel"];
const oneMinute = 6e4;
function useIdle(timeout = oneMinute, options = {}) {
const {
initialState = false,
listenForVisibilityChange = true,
events = defaultEvents$1,
window = defaultWindow,
eventFilter = shared.throttleFilter(50)
} = options;
const idle = vueDemi.ref(initialState);
const lastActive = vueDemi.ref(shared.timestamp());
let timer;
const onEvent = shared.createFilterWrapper(eventFilter, () => {
idle.value = false;
lastActive.value = shared.timestamp();
clearTimeout(timer);
timer = setTimeout(() => idle.value = true, timeout);
});
if (window) {
const document = window.document;
for (const event of events)
useEventListener(window, event, onEvent, { passive: true });
if (listenForVisibilityChange) {
useEventListener(document, "visibilitychange", () => {
if (!document.hidden)
onEvent();
});
}
}
timer = setTimeout(() => idle.value = true, timeout);
return { idle, lastActive };
}
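/**
 * Reactive scroll state: x/y position, an isScrolling flag, per-edge arrival
 * state, and scroll direction, with throttle and onStop idle detection.
 *
 * Usage sketch (assumes `el` is a template ref to a scrollable element):
 *   const { x, y, arrivedState } = useScroll(el, { offset: { bottom: 30 } });
 */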
function useScroll(element, options = {}) {
const {
throttle = 0,
idle = 200,
onStop = shared.noop,
onScroll = shared.noop,
offset = {
left: 0,
right: 0,
top: 0,
bottom: 0
},
eventListenerOptions = {
capture: false,
passive: true
}
} = options;
const x = vueDemi.ref(0);
const y = vueDemi.ref(0);
const isScrolling = vueDemi.ref(false);
const arrivedState = vueDemi.reactive({
left: true,
right: false,
top: true,
bottom: false
});
const directions = vueDemi.reactive({
left: false,
right: false,
top: false,
bottom: false
});
if (element) {
const onScrollEnd = shared.useDebounceFn((e) => {
isScrolling.value = false;
directions.left = false;
directions.right = false;
directions.top = false;
directions.bottom = false;
onStop(e);
}, throttle + idle);
const onScrollHandler = (e) => {
const eventTarget = e.target === document ? e.target.documentElement : e.target;
const scrollLeft = eventTarget.scrollLeft;
directions.left = scrollLeft < x.value;
directions.right = scrollLeft > x.value;
arrivedState.left = scrollLeft <= 0 + (offset.left || 0);
arrivedState.right = scrollLeft + eventTarget.clientWidth >= eventTarget.scrollWidth - (offset.right || 0);
x.value = scrollLeft;
const scrollTop = eventTarget.scrollTop;
directions.top = scrollTop < y.value;
directions.bottom = scrollTop > y.value;
arrivedState.top = scrollTop <= 0 + (offset.top || 0);
arrivedState.bottom = scrollTop + eventTarget.clientHeight >= eventTarget.scrollHeight - (offset.bottom || 0);
y.value = scrollTop;
isScrolling.value = true;
onScrollEnd(e);
onScroll(e);
};
useEventListener(element, "scroll", throttle ? shared.useThrottleFn(onScrollHandler, throttle) : onScrollHandler, eventListenerOptions);
}
return {
x,
y,
isScrolling,
arrivedState,
directions
};
}
var __defProp$7 = Object.defineProperty;
var __defProps$2 = Object.defineProperties;
var __getOwnPropDescs$2 = Object.getOwnPropertyDescriptors;
var __getOwnPropSymbols$8 = Object.getOwnPropertySymbols;
var __hasOwnProp$8 = Object.prototype.hasOwnProperty;
var __propIsEnum$8 = Object.prototype.propertyIsEnumerable;
var __defNormalProp$7 = (obj, key, value) => key in obj ? __defProp$7(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues$7 = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp$8.call(b, prop))
__defNormalProp$7(a, prop, b[prop]);
if (__getOwnPropSymbols$8)
for (var prop of __getOwnPropSymbols$8(b)) {
if (__propIsEnum$8.call(b, prop))
__defNormalProp$7(a, prop, b[prop]);
}
return a;
};
var __spreadProps$2 = (a, b) => __defProps$2(a, __getOwnPropDescs$2(b));
function useInfiniteScroll(element, onLoadMore, options = {}) {
var _a;
const state = vueDemi.reactive(useScroll(element, __spreadProps$2(__spreadValues$7({}, options), {
offset: __spreadValues$7({
bottom: (_a = options.distance) != null ? _a : 0
}, options.offset)
})));
vueDemi.watch(() => state.arrivedState.bottom, (v) => {
if (v)
onLoadMore(state);
});
}
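/**
 * IntersectionObserver wrapper: re-observes when the target or root changes
 * and disconnects on scope dispose.
 *
 * Usage sketch (assumes `el` is a template ref):
 *   useIntersectionObserver(el, ([{ isIntersecting }]) => console.log(isIntersecting));
 */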
function useIntersectionObserver(target, callback, options = {}) {
const {
root,
rootMargin = "0px",
threshold = 0.1,
window = defaultWindow
} = options;
const isSupported = window && "IntersectionObserver" in window;
let cleanup = shared.noop;
const stopWatch = isSupported ? vueDemi.watch(() => ({
el: unrefElement(target),
root: unrefElement(root)
}), ({ el, root: root2 }) => {
cleanup();
if (!el)
return;
const observer = new IntersectionObserver(callback, {
root: root2,
rootMargin,
threshold
});
observer.observe(el);
cleanup = () => {
observer.disconnect();
cleanup = shared.noop;
};
}, { immediate: true, flush: "post" }) : shared.noop;
const stop = () => {
cleanup();
stopWatch();
};
shared.tryOnScopeDispose(stop);
return {
isSupported,
stop
};
}
const defaultEvents = ["mousedown", "mouseup", "keydown", "keyup"];
function useKeyModifier(modifier, options = {}) {
const {
events = defaultEvents,
document = defaultDocument,
initial = null
} = options;
const state = vueDemi.ref(initial);
if (document) {
events.forEach((listenerEvent) => {
useEventListener(document, listenerEvent, (evt) => {
if (typeof evt.getModifierState === "function")
state.value = evt.getModifierState(modifier);
});
});
}
return state;
}
function useLocalStorage(key, initialValue, options = {}) {
const { window = defaultWindow } = options;
return useStorage(key, initialValue, window == null ? void 0 : window.localStorage, options);
}
const DefaultMagicKeysAliasMap = {
ctrl: "control",
command: "meta",
cmd: "meta",
option: "alt",
up: "arrowup",
down: "arrowdown",
left: "arrowleft",
right: "arrowright"
};
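/**
 * Reactive keyboard state behind a Proxy: any accessed key name becomes a
 * boolean ref, combos like "ctrl+s" are computed from their parts, and alias
 * names (cmd, up, ...) resolve through DefaultMagicKeysAliasMap.
 *
 * Usage sketch:
 *   const keys = useMagicKeys();
 *   vueDemi.watch(keys["ctrl+s"], (pressed) => pressed && console.log("save"));
 */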
function useMagicKeys(options = {}) {
const {
reactive: useReactive = false,
target = defaultWindow,
aliasMap = DefaultMagicKeysAliasMap,
passive = true,
onEventFired = shared.noop
} = options;
const current = vueDemi.reactive(/* @__PURE__ */ new Set());
const obj = { toJSON() {
return {};
}, current };
const refs = useReactive ? vueDemi.reactive(obj) : obj;
const metaDeps = /* @__PURE__ */ new Set();
function setRefs(key, value) {
if (key in refs) {
if (useReactive)
refs[key] = value;
else
refs[key].value = value;
}
}
function updateRefs(e, value) {
var _a, _b;
const key = (_a = e.key) == null ? void 0 : _a.toLowerCase();
const code = (_b = e.code) == null ? void 0 : _b.toLowerCase();
const values = [code, key].filter(Boolean);
if (code) {
if (value)
current.add(e.code);
else
current.delete(e.code);
}
for (const key2 of values)
setRefs(key2, value);
if (key === "meta" && !value) {
metaDeps.forEach((key2) => {
current.delete(key2);
setRefs(key2, false);
});
metaDeps.clear();
} else if (e.getModifierState("Meta") && value) {
[...current, ...values].forEach((key2) => metaDeps.add(key2));
}
}
if (target) {
useEventListener(target, "keydown", (e) => {
updateRefs(e, true);
return onEventFired(e);
}, { passive });
useEventListener(target, "keyup", (e) => {
updateRefs(e, false);
return onEventFired(e);
}, { passive });
}
const proxy = new Proxy(refs, {
get(target2, prop, rec) {
if (typeof prop !== "string")
return Reflect.get(target2, prop, rec);
prop = prop.toLowerCase();
if (prop in aliasMap)
prop = aliasMap[prop];
if (!(prop in refs)) {
if (/[+_-]/.test(prop)) {
const keys = prop.split(/[+_-]/g).map((i) => i.trim());
refs[prop] = vueDemi.computed(() => keys.every((key) => vueDemi.unref(proxy[key])));
} else {
refs[prop] = vueDemi.ref(false);
}
}
const r = Reflect.get(target2, prop, rec);
return useReactive ? vueDemi.unref(r) : r;
}
});
return proxy;
}
var __defProp$6 = Object.defineProperty;
var __getOwnPropSymbols$7 = Object.getOwnPropertySymbols;
var __hasOwnProp$7 = Object.prototype.hasOwnProperty;
var __propIsEnum$7 = Object.prototype.propertyIsEnumerable;
var __defNormalProp$6 = (obj, key, value) => key in obj ? __defProp$6(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues$6 = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp$7.call(b, prop))
__defNormalProp$6(a, prop, b[prop]);
if (__getOwnPropSymbols$7)
for (var prop of __getOwnPropSymbols$7(b)) {
if (__propIsEnum$7.call(b, prop))
__defNormalProp$6(a, prop, b[prop]);
}
return a;
};
function usingElRef(source, cb) {
if (vueDemi.unref(source))
cb(vueDemi.unref(source));
}
function timeRangeToArray(timeRanges) {
let ranges = [];
for (let i = 0; i < timeRanges.length; ++i)
ranges = [...ranges, [timeRanges.start(i), timeRanges.end(i)]];
return ranges;
}
function tracksToArray(tracks) {
return Array.from(tracks).map(({ label, kind, language, mode, activeCues, cues, inBandMetadataTrackDispatchType }, id) => ({ id, label, kind, language, mode, activeCues, cues, inBandMetadataTrackDispatchType }));
}
const defaultOptions = {
src: "",
tracks: []
};
function useMediaControls(target, options = {}) {
options = __spreadValues$6(__spreadValues$6({}, defaultOptions), options);
const {
document = defaultDocument
} = options;
const currentTime = vueDemi.ref(0);
const duration = vueDemi.ref(0);
const seeking = vueDemi.ref(false);
const volume = vueDemi.ref(1);
const waiting = vueDemi.ref(false);
const ended = vueDemi.ref(false);
const playing = vueDemi.ref(false);
const rate = vueDemi.ref(1);
const stalled = vueDemi.ref(false);
const buffered = vueDemi.ref([]);
const tracks = vueDemi.ref([]);
const selectedTrack = vueDemi.ref(-1);
const isPictureInPicture = vueDemi.ref(false);
const muted = vueDemi.ref(false);
const supportsPictureInPicture = document && "pictureInPictureEnabled" in document;
const sourceErrorEvent = shared.createEventHook();
const disableTrack = (track) => {
usingElRef(target, (el) => {
if (track) {
const id = shared.isNumber(track) ? track : track.id;
el.textTracks[id].mode = "disabled";
} else {
for (let i = 0; i < el.textTracks.length; ++i)
el.textTracks[i].mode = "disabled";
}
selectedTrack.value = -1;
});
};
const enableTrack = (track, disableTracks = true) => {
usingElRef(target, (el) => {
const id = shared.isNumber(track) ? track : track.id;
if (disableTracks)
disableTrack();
el.textTracks[id].mode = "showing";
selectedTrack.value = id;
});
};
const togglePictureInPicture = () => {
return new Promise((resolve, reject) => {
usingElRef(target, async (el) => {
if (supportsPictureInPicture) {
if (!isPictureInPicture.value) {
el.requestPictureInPicture().then(resolve).catch(reject);
} else {
document.exitPictureInPicture().then(resolve).catch(reject);
}
}
});
});
};
vueDemi.watchEffect(() => {
if (!document)
return;
const el = vueDemi.unref(target);
if (!el)
return;
const src = vueDemi.unref(options.src);
let sources = [];
if (!src)
return;
if (shared.isString(src))
sources = [{ src }];
else if (Array.isArray(src))
sources = src;
else if (shared.isObject(src))
sources = [src];
el.querySelectorAll("source").forEach((e) => {
e.removeEventListener("error", sourceErrorEvent.trigger);
e.remove();
});
sources.forEach(({ src: src2, type }) => {
const source = document.createElement("source");
source.setAttribute("src", src2);
source.setAttribute("type", type || "");
source.addEventListener("error", sourceErrorEvent.trigger);
el.appendChild(source);
});
el.load();
});
shared.tryOnScopeDispose(() => {
const el = vueDemi.unref(target);
if (!el)
return;
el.querySelectorAll("source").forEach((e) => e.removeEventListener("error", sourceErrorEvent.trigger));
});
vueDemi.watch(volume, (vol) => {
const el = vueDemi.unref(target);
if (!el)
return;
el.volume = vol;
});
vueDemi.watch(muted, (mute) => {
const el = vueDemi.unref(target);
if (!el)
return;
el.muted = mute;
});
vueDemi.watch(rate, (rate2) => {
const el = vueDemi.unref(target);
if (!el)
return;
el.playbackRate = rate2;
});
vueDemi.watchEffect(() => {
if (!document)
return;
const textTracks = vueDemi.unref(options.tracks);
const el = vueDemi.unref(target);
if (!textTracks || !textTracks.length || !el)
return;
el.querySelectorAll("track").forEach((e) => e.remove());
textTracks.forEach(({ default: isDefault, kind, label, src, srcLang }, i) => {
const track = document.createElement("track");
track.default = isDefault || false;
track.kind = kind;
track.label = label;
track.src = src;
track.srclang = srcLang;
if (track.default)
selectedTrack.value = i;
el.appendChild(track);
});
});
const { ignoreUpdates: ignoreCurrentTimeUpdates } = shared.watchIgnorable(currentTime, (time) => {
const el = vueDemi.unref(target);
if (!el)
return;
el.currentTime = time;
});
const { ignoreUpdates: ignorePlayingUpdates } = shared.watchIgnorable(playing, (isPlaying) => {
const el = vueDemi.unref(target);
if (!el)
return;
isPlaying ? el.play() : el.pause();
});
useEventListener(target, "timeupdate", () => ignoreCurrentTimeUpdates(() => currentTime.value = vueDemi.unref(target).currentTime));
useEventListener(target, "durationchange", () => duration.value = vueDemi.unref(target).duration);
useEventListener(target, "progress", () => buffered.value = timeRangeToArray(vueDemi.unref(target).buffered));
useEventListener(target, "seeking", () => seeking.value = true);
useEventListener(target, "seeked", () => seeking.value = false);
useEventListener(target, "waiting", () => waiting.value = true);
useEventListener(target, "playing", () => waiting.value = false);
useEventListener(target, "ratechange", () => rate.value = vueDemi.unref(target).playbackRate);
useEventListener(target, "stalled", () => stalled.value = true);
useEventListener(target, "ended", () => ended.value = true);
useEventListener(target, "pause", () => ignorePlayingUpdates(() => playing.value = false));
useEventListener(target, "play", () => ignorePlayingUpdates(() => playing.value = true));
useEventListener(target, "enterpictureinpicture", () => isPictureInPicture.value = true);
useEventListener(target, "leavepictureinpicture", () => isPictureInPicture.value = false);
useEventListener(target, "volumechange", () => {
const el = vueDemi.unref(target);
if (!el)
return;
volume.value = el.volume;
muted.value = el.muted;
});
const listeners = [];
const stop = vueDemi.watch([target], () => {
const el = vueDemi.unref(target);
if (!el)
return;
stop();
listeners[0] = useEventListener(el.textTracks, "addtrack", () => tracks.value = tracksToArray(el.textTracks));
listeners[1] = useEventListener(el.textTracks, "removetrack", () => tracks.value = tracksToArray(el.textTracks));
listeners[2] = useEventListener(el.textTracks, "change", () => tracks.value = tracksToArray(el.textTracks));
});
shared.tryOnScopeDispose(() => listeners.forEach((listener) => listener()));
return {
currentTime,
duration,
waiting,
seeking,
ended,
stalled,
buffered,
playing,
rate,
volume,
muted,
tracks,
selectedTrack,
enableTrack,
disableTrack,
supportsPictureInPicture,
togglePictureInPicture,
isPictureInPicture,
onSourceError: sourceErrorEvent.on
};
}
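// Usage sketch (illustrative only; `videoEl` is a hypothetical template ref
// pointing at a <video> element):
//   const { playing, currentTime, volume } = useMediaControls(videoEl, {
//     src: "video.mp4"
//   });
//   playing.value = true;   // triggers el.play() via the ignorable watcher
//   currentTime.value = 30; // seeks; the echoed "timeupdate" is ignored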
const getMapVue2Compat = () => {
const data = vueDemi.reactive({});
return {
get: (key) => data[key],
set: (key, value) => vueDemi.set(data, key, value),
has: (key) => Object.prototype.hasOwnProperty.call(data, key),
delete: (key) => vueDemi.del(data, key),
clear: () => {
Object.keys(data).forEach((key) => {
vueDemi.del(data, key);
});
}
};
};
function useMemoize(resolver, options) {
const initCache = () => {
if (options == null ? void 0 : options.cache)
return vueDemi.reactive(options.cache);
if (vueDemi.isVue2)
return getMapVue2Compat();
return vueDemi.reactive(/* @__PURE__ */ new Map());
};
const cache = initCache();
const generateKey = (...args) => (options == null ? void 0 : options.getKey) ? options.getKey(...args) : JSON.stringify(args);
const _loadData = (key, ...args) => {
cache.set(key, resolver(...args));
return cache.get(key);
};
const loadData = (...args) => _loadData(generateKey(...args), ...args);
const deleteData = (...args) => {
cache.delete(generateKey(...args));
};
const clearData = () => {
cache.clear();
};
const memoized = (...args) => {
const key = generateKey(...args);
if (cache.has(key))
return cache.get(key);
return _loadData(key, ...args);
};
memoized.load = loadData;
memoized.delete = deleteData;
memoized.clear = clearData;
memoized.generateKey = generateKey;
memoized.cache = cache;
return memoized;
}
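// Usage sketch (illustrative only): the memoized wrapper returns the cached
// value when the generated key exists, otherwise resolves and stores it.
//   const square = useMemoize((n) => {
//     console.log("computing", n);
//     return n * n;
//   });
//   square(4); // logs "computing 4", returns 16
//   square(4); // cache hit keyed by JSON.stringify([4]); no log
//   square.delete(4); // evicts that entry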
function useMemory(options = {}) {
const memory = vueDemi.ref();
const isSupported = typeof performance !== "undefined" && "memory" in performance;
if (isSupported) {
const { interval = 1e3 } = options;
shared.useIntervalFn(() => {
memory.value = performance.memory;
}, interval, { immediate: options.immediate, immediateCallback: options.immediateCallback });
}
return { isSupported, memory };
}
function useMounted() {
const isMounted = vueDemi.ref(false);
vueDemi.onMounted(() => {
isMounted.value = true;
});
return isMounted;
}
function useMouse(options = {}) {
const {
type = "page",
touch = true,
resetOnTouchEnds = false,
initialValue = { x: 0, y: 0 },
window = defaultWindow,
eventFilter
} = options;
const x = vueDemi.ref(initialValue.x);
const y = vueDemi.ref(initialValue.y);
const sourceType = vueDemi.ref(null);
const mouseHandler = (event) => {
if (type === "page") {
x.value = event.pageX;
y.value = event.pageY;
} else if (type === "client") {
x.value = event.clientX;
y.value = event.clientY;
}
sourceType.value = "mouse";
};
const reset = () => {
x.value = initialValue.x;
y.value = initialValue.y;
};
const touchHandler = (event) => {
if (event.touches.length > 0) {
const touch2 = event.touches[0];
if (type === "page") {
x.value = touch2.pageX;
y.value = touch2.pageY;
} else if (type === "client") {
x.value = touch2.clientX;
y.value = touch2.clientY;
}
sourceType.value = "touch";
}
};
const mouseHandlerWrapper = (event) => {
return eventFilter === void 0 ? mouseHandler(event) : eventFilter(() => mouseHandler(event), {});
};
const touchHandlerWrapper = (event) => {
return eventFilter === void 0 ? touchHandler(event) : eventFilter(() => touchHandler(event), {});
};
if (window) {
useEventListener(window, "mousemove", mouseHandlerWrapper, { passive: true });
useEventListener(window, "dragover", mouseHandlerWrapper, { passive: true });
if (touch) {
useEventListener(window, "touchstart", touchHandlerWrapper, { passive: true });
useEventListener(window, "touchmove", touchHandlerWrapper, { passive: true });
if (resetOnTouchEnds)
useEventListener(window, "touchend", reset, { passive: true });
}
}
return {
x,
y,
sourceType
};
}
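// Usage sketch (illustrative only):
//   const { x, y, sourceType } = useMouse({ touch: false });
//   // x/y track pageX/pageY by default; pass { type: "client" } for
//   // viewport-relative coordinates. sourceType is "mouse" or "touch".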
function useMouseInElement(target, options = {}) {
const {
handleOutside = true,
window = defaultWindow
} = options;
const { x, y, sourceType } = useMouse(options);
const targetRef = vueDemi.ref(target != null ? target : window == null ? void 0 : window.document.body);
const elementX = vueDemi.ref(0);
const elementY = vueDemi.ref(0);
const elementPositionX = vueDemi.ref(0);
const elementPositionY = vueDemi.ref(0);
const elementHeight = vueDemi.ref(0);
const elementWidth = vueDemi.ref(0);
const isOutside = vueDemi.ref(false);
let stop = () => {
};
if (window) {
stop = vueDemi.watch([targetRef, x, y], () => {
const el = unrefElement(targetRef);
if (!el)
return;
const {
left,
top,
width,
height
} = el.getBoundingClientRect();
elementPositionX.value = left + window.pageXOffset;
elementPositionY.value = top + window.pageYOffset;
elementHeight.value = height;
elementWidth.value = width;
const elX = x.value - elementPositionX.value;
const elY = y.value - elementPositionY.value;
isOutside.value = elX < 0 || elY < 0 || elX > elementWidth.value || elY > elementHeight.value;
if (handleOutside || !isOutside.value) {
elementX.value = elX;
elementY.value = elY;
}
}, { immediate: true });
}
return {
x,
y,
sourceType,
elementX,
elementY,
elementPositionX,
elementPositionY,
elementHeight,
elementWidth,
isOutside,
stop
};
}
function useMousePressed(options = {}) {
const {
touch = true,
drag = true,
initialValue = false,
window = defaultWindow
} = options;
const pressed = vueDemi.ref(initialValue);
const sourceType = vueDemi.ref(null);
if (!window) {
return {
pressed,
sourceType
};
}
const onPressed = (srcType) => () => {
pressed.value = true;
sourceType.value = srcType;
};
const onReleased = () => {
pressed.value = false;
sourceType.value = null;
};
const target = vueDemi.computed(() => unrefElement(options.target) || window);
useEventListener(target, "mousedown", onPressed("mouse"), { passive: true });
useEventListener(window, "mouseleave", onReleased, { passive: true });
useEventListener(window, "mouseup", onReleased, { passive: true });
if (drag) {
useEventListener(target, "dragstart", onPressed("mouse"), { passive: true });
useEventListener(window, "drop", onReleased, { passive: true });
useEventListener(window, "dragend", onReleased, { passive: true });
}
if (touch) {
useEventListener(target, "touchstart", onPressed("touch"), { passive: true });
useEventListener(window, "touchend", onReleased, { passive: true });
useEventListener(window, "touchcancel", onReleased, { passive: true });
}
return {
pressed,
sourceType
};
}
var __getOwnPropSymbols$6 = Object.getOwnPropertySymbols;
var __hasOwnProp$6 = Object.prototype.hasOwnProperty;
var __propIsEnum$6 = Object.prototype.propertyIsEnumerable;
var __objRest$1 = (source, exclude) => {
var target = {};
for (var prop in source)
if (__hasOwnProp$6.call(source, prop) && exclude.indexOf(prop) < 0)
target[prop] = source[prop];
if (source != null && __getOwnPropSymbols$6)
for (var prop of __getOwnPropSymbols$6(source)) {
if (exclude.indexOf(prop) < 0 && __propIsEnum$6.call(source, prop))
target[prop] = source[prop];
}
return target;
};
function useMutationObserver(target, callback, options = {}) {
const _a = options, { window = defaultWindow } = _a, mutationOptions = __objRest$1(_a, ["window"]);
let observer;
const isSupported = window && "IntersectionObserver" in window;
const cleanup = () => {
if (observer) {
observer.disconnect();
observer = void 0;
}
};
const stopWatch = vueDemi.watch(() => unrefElement(target), (el) => {
cleanup();
if (isSupported && window && el) {
observer = new MutationObserver(callback);
observer.observe(el, mutationOptions);
}
}, { immediate: true });
const stop = () => {
cleanup();
stopWatch();
};
shared.tryOnScopeDispose(stop);
return {
isSupported,
stop
};
}
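// Usage sketch (illustrative only; `el` is a hypothetical element ref):
//   const { stop } = useMutationObserver(el, (mutations) => {
//     console.log(mutations[0].type);
//   }, { attributes: true, childList: true });
//   // stop() disconnects the observer and stops re-attaching when el changes.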
const useNavigatorLanguage = (options = {}) => {
const { window = defaultWindow } = options;
const navigator = window == null ? void 0 : window.navigator;
const isSupported = Boolean(navigator && "language" in navigator);
const language = vueDemi.ref(navigator == null ? void 0 : navigator.language);
useEventListener(window, "languagechange", () => {
if (navigator)
language.value = navigator.language;
});
return {
isSupported,
language
};
};
function useNetwork(options = {}) {
const { window = defaultWindow } = options;
const navigator = window == null ? void 0 : window.navigator;
const isSupported = Boolean(navigator && "connection" in navigator);
const isOnline = vueDemi.ref(true);
const saveData = vueDemi.ref(false);
const offlineAt = vueDemi.ref(void 0);
const downlink = vueDemi.ref(void 0);
const downlinkMax = vueDemi.ref(void 0);
const rtt = vueDemi.ref(void 0);
const effectiveType = vueDemi.ref(void 0);
const type = vueDemi.ref("unknown");
const connection = isSupported && navigator.connection;
function updateNetworkInformation() {
if (!navigator)
return;
isOnline.value = navigator.onLine;
offlineAt.value = isOnline.value ? void 0 : Date.now();
if (connection) {
downlink.value = connection.downlink;
downlinkMax.value = connection.downlinkMax;
effectiveType.value = connection.effectiveType;
rtt.value = connection.rtt;
saveData.value = connection.saveData;
type.value = connection.type;
}
}
if (window) {
useEventListener(window, "offline", () => {
isOnline.value = false;
offlineAt.value = Date.now();
});
useEventListener(window, "online", () => {
isOnline.value = true;
});
}
if (connection)
useEventListener(connection, "change", updateNetworkInformation, false);
updateNetworkInformation();
return {
isSupported,
isOnline,
saveData,
offlineAt,
downlink,
downlinkMax,
effectiveType,
rtt,
type
};
}
var __defProp$5 = Object.defineProperty;
var __getOwnPropSymbols$5 = Object.getOwnPropertySymbols;
var __hasOwnProp$5 = Object.prototype.hasOwnProperty;
var __propIsEnum$5 = Object.prototype.propertyIsEnumerable;
var __defNormalProp$5 = (obj, key, value) => key in obj ? __defProp$5(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues$5 = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp$5.call(b, prop))
__defNormalProp$5(a, prop, b[prop]);
if (__getOwnPropSymbols$5)
for (var prop of __getOwnPropSymbols$5(b)) {
if (__propIsEnum$5.call(b, prop))
__defNormalProp$5(a, prop, b[prop]);
}
return a;
};
function useNow(options = {}) {
const {
controls: exposeControls = false,
interval = "requestAnimationFrame"
} = options;
const now = vueDemi.ref(new Date());
const update = () => now.value = new Date();
const controls = interval === "requestAnimationFrame" ? useRafFn(update, { immediate: true }) : shared.useIntervalFn(update, interval, { immediate: true });
if (exposeControls) {
return __spreadValues$5({
now
}, controls);
} else {
return now;
}
}
function useOffsetPagination(options) {
const {
total = Infinity,
pageSize = 10,
page = 1,
onPageChange = shared.noop,
onPageSizeChange = shared.noop,
onPageCountChange = shared.noop
} = options;
const currentPageSize = useClamp(pageSize, 1, Infinity);
const pageCount = vueDemi.computed(() => Math.ceil(vueDemi.unref(total) / vueDemi.unref(currentPageSize)));
const currentPage = useClamp(page, 1, pageCount);
const isFirstPage = vueDemi.computed(() => currentPage.value === 1);
const isLastPage = vueDemi.computed(() => currentPage.value === pageCount.value);
if (vueDemi.isRef(page))
shared.syncRef(page, currentPage);
if (vueDemi.isRef(pageSize))
shared.syncRef(pageSize, currentPageSize);
function prev() {
currentPage.value--;
}
function next() {
currentPage.value++;
}
const returnValue = {
currentPage,
currentPageSize,
pageCount,
isFirstPage,
isLastPage,
prev,
next
};
vueDemi.watch(currentPage, () => {
onPageChange(vueDemi.reactive(returnValue));
});
vueDemi.watch(currentPageSize, () => {
onPageSizeChange(vueDemi.reactive(returnValue));
});
vueDemi.watch(pageCount, () => {
onPageCountChange(vueDemi.reactive(returnValue));
});
return returnValue;
}
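// Usage sketch (illustrative only):
//   const { currentPage, pageCount, next, isLastPage } = useOffsetPagination({
//     total: 95,
//     pageSize: 10
//   });
//   // pageCount is ceil(95 / 10) = 10; currentPage is clamped to [1, 10],
//   // so calling next() on the last page is a no-op.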
function useOnline(options = {}) {
const { isOnline } = useNetwork(options);
return isOnline;
}
function usePageLeave(options = {}) {
const { window = defaultWindow } = options;
const isLeft = vueDemi.ref(false);
const handler = (event) => {
if (!window)
return;
event = event || window.event;
const from = event.relatedTarget || event.toElement;
isLeft.value = !from;
};
if (window) {
useEventListener(window, "mouseout", handler, { passive: true });
useEventListener(window.document, "mouseleave", handler, { passive: true });
useEventListener(window.document, "mouseenter", handler, { passive: true });
}
return isLeft;
}
function useParallax(target, options = {}) {
const {
deviceOrientationTiltAdjust = (i) => i,
deviceOrientationRollAdjust = (i) => i,
mouseTiltAdjust = (i) => i,
mouseRollAdjust = (i) => i,
window = defaultWindow
} = options;
const orientation = vueDemi.reactive(useDeviceOrientation({ window }));
const {
elementX: x,
elementY: y,
elementWidth: width,
elementHeight: height
} = useMouseInElement(target, { handleOutside: false, window });
const source = vueDemi.computed(() => {
if (orientation.isSupported && (orientation.alpha != null && orientation.alpha !== 0 || orientation.gamma != null && orientation.gamma !== 0))
return "deviceOrientation";
return "mouse";
});
const roll = vueDemi.computed(() => {
if (source.value === "deviceOrientation") {
const value = -orientation.beta / 90;
return deviceOrientationRollAdjust(value);
} else {
const value = -(y.value - height.value / 2) / height.value;
return mouseRollAdjust(value);
}
});
const tilt = vueDemi.computed(() => {
if (source.value === "deviceOrientation") {
const value = orientation.gamma / 90;
return deviceOrientationTiltAdjust(value);
} else {
const value = (x.value - width.value / 2) / width.value;
return mouseTiltAdjust(value);
}
});
return { roll, tilt, source };
}
var __defProp$4 = Object.defineProperty;
var __defProps$1 = Object.defineProperties;
var __getOwnPropDescs$1 = Object.getOwnPropertyDescriptors;
var __getOwnPropSymbols$4 = Object.getOwnPropertySymbols;
var __hasOwnProp$4 = Object.prototype.hasOwnProperty;
var __propIsEnum$4 = Object.prototype.propertyIsEnumerable;
var __defNormalProp$4 = (obj, key, value) => key in obj ? __defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues$4 = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp$4.call(b, prop))
__defNormalProp$4(a, prop, b[prop]);
if (__getOwnPropSymbols$4)
for (var prop of __getOwnPropSymbols$4(b)) {
if (__propIsEnum$4.call(b, prop))
__defNormalProp$4(a, prop, b[prop]);
}
return a;
};
var __spreadProps$1 = (a, b) => __defProps$1(a, __getOwnPropDescs$1(b));
const defaultState = {
x: 0,
y: 0,
pointerId: 0,
pressure: 0,
tiltX: 0,
tiltY: 0,
width: 0,
height: 0,
twist: 0,
pointerType: null
};
const keys = /* @__PURE__ */ Object.keys(defaultState);
function usePointer(options = {}) {
const {
target = defaultWindow
} = options;
const isInside = vueDemi.ref(false);
const state = vueDemi.ref(options.initialValue || {});
Object.assign(state.value, defaultState, state.value);
const handler = (event) => {
isInside.value = true;
if (options.pointerTypes && !options.pointerTypes.includes(event.pointerType))
return;
state.value = shared.objectPick(event, keys, false);
};
if (target) {
useEventListener(target, "pointerdown", handler, { passive: true });
useEventListener(target, "pointermove", handler, { passive: true });
useEventListener(target, "pointerleave", () => isInside.value = false, { passive: true });
}
return __spreadProps$1(__spreadValues$4({}, shared.toRefs(state)), {
isInside
});
}
var SwipeDirection = /* @__PURE__ */ ((SwipeDirection2) => {
SwipeDirection2["UP"] = "UP";
SwipeDirection2["RIGHT"] = "RIGHT";
SwipeDirection2["DOWN"] = "DOWN";
SwipeDirection2["LEFT"] = "LEFT";
SwipeDirection2["NONE"] = "NONE";
return SwipeDirection2;
})(SwipeDirection || {});
function useSwipe(target, options = {}) {
const {
threshold = 50,
onSwipe,
onSwipeEnd,
onSwipeStart,
passive = true,
window = defaultWindow
} = options;
const coordsStart = vueDemi.reactive({ x: 0, y: 0 });
const coordsEnd = vueDemi.reactive({ x: 0, y: 0 });
const diffX = vueDemi.computed(() => coordsStart.x - coordsEnd.x);
const diffY = vueDemi.computed(() => coordsStart.y - coordsEnd.y);
const { max, abs } = Math;
const isThresholdExceeded = vueDemi.computed(() => max(abs(diffX.value), abs(diffY.value)) >= threshold);
const isSwiping = vueDemi.ref(false);
const direction = vueDemi.computed(() => {
if (!isThresholdExceeded.value)
return "NONE" /* NONE */;
if (abs(diffX.value) > abs(diffY.value)) {
return diffX.value > 0 ? "LEFT" /* LEFT */ : "RIGHT" /* RIGHT */;
} else {
return diffY.value > 0 ? "UP" /* UP */ : "DOWN" /* DOWN */;
}
});
const getTouchEventCoords = (e) => [e.touches[0].clientX, e.touches[0].clientY];
const updateCoordsStart = (x, y) => {
coordsStart.x = x;
coordsStart.y = y;
};
const updateCoordsEnd = (x, y) => {
coordsEnd.x = x;
coordsEnd.y = y;
};
let listenerOptions;
const isPassiveEventSupported = checkPassiveEventSupport(window == null ? void 0 : window.document);
if (!passive)
listenerOptions = isPassiveEventSupported ? { passive: false, capture: true } : { capture: true };
else
listenerOptions = isPassiveEventSupported ? { passive: true } : { capture: false };
const onTouchEnd = (e) => {
if (isSwiping.value)
onSwipeEnd == null ? void 0 : onSwipeEnd(e, direction.value);
isSwiping.value = false;
};
const stops = [
useEventListener(target, "touchstart", (e) => {
if (listenerOptions.capture && !listenerOptions.passive)
e.preventDefault();
const [x, y] = getTouchEventCoords(e);
updateCoordsStart(x, y);
updateCoordsEnd(x, y);
onSwipeStart == null ? void 0 : onSwipeStart(e);
}, listenerOptions),
useEventListener(target, "touchmove", (e) => {
const [x, y] = getTouchEventCoords(e);
updateCoordsEnd(x, y);
if (!isSwiping.value && isThresholdExceeded.value)
isSwiping.value = true;
if (isSwiping.value)
onSwipe == null ? void 0 : onSwipe(e);
}, listenerOptions),
useEventListener(target, "touchend", onTouchEnd, listenerOptions),
useEventListener(target, "touchcancel", onTouchEnd, listenerOptions)
];
const stop = () => stops.forEach((s) => s());
return {
isPassiveEventSupported,
isSwiping,
direction,
coordsStart,
coordsEnd,
lengthX: diffX,
lengthY: diffY,
stop
};
}
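// Usage sketch (illustrative only; `el` is a hypothetical element ref):
//   const { direction, isSwiping, lengthX } = useSwipe(el, {
//     threshold: 30,
//     onSwipeEnd: (e, dir) => console.log("swiped", dir)
//   });
//   // direction stays "NONE" until max(|diffX|, |diffY|) >= threshold.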
function checkPassiveEventSupport(document) {
if (!document)
return false;
let supportsPassive = false;
const optionsBlock = {
get passive() {
supportsPassive = true;
return false;
}
};
document.addEventListener("x", shared.noop, optionsBlock);
document.removeEventListener("x", shared.noop);
return supportsPassive;
}
function usePointerSwipe(target, options = {}) {
const targetRef = vueDemi.ref(target);
const {
threshold = 50,
onSwipe,
onSwipeEnd,
onSwipeStart
} = options;
const posStart = vueDemi.reactive({ x: 0, y: 0 });
const updatePosStart = (x, y) => {
posStart.x = x;
posStart.y = y;
};
const posEnd = vueDemi.reactive({ x: 0, y: 0 });
const updatePosEnd = (x, y) => {
posEnd.x = x;
posEnd.y = y;
};
const distanceX = vueDemi.computed(() => posStart.x - posEnd.x);
const distanceY = vueDemi.computed(() => posStart.y - posEnd.y);
const { max, abs } = Math;
const isThresholdExceeded = vueDemi.computed(() => max(abs(distanceX.value), abs(distanceY.value)) >= threshold);
const isSwiping = vueDemi.ref(false);
const isPointerDown = vueDemi.ref(false);
const direction = vueDemi.computed(() => {
if (!isThresholdExceeded.value)
return SwipeDirection.NONE;
if (abs(distanceX.value) > abs(distanceY.value)) {
return distanceX.value > 0 ? SwipeDirection.LEFT : SwipeDirection.RIGHT;
} else {
return distanceY.value > 0 ? SwipeDirection.UP : SwipeDirection.DOWN;
}
});
const filterEvent = (e) => {
if (options.pointerTypes)
return options.pointerTypes.includes(e.pointerType);
return true;
};
const stops = [
useEventListener(target, "pointerdown", (e) => {
var _a, _b;
if (!filterEvent(e))
return;
isPointerDown.value = true;
(_b = (_a = targetRef.value) == null ? void 0 : _a.style) == null ? void 0 : _b.setProperty("touch-action", "none");
const eventTarget = e.target;
eventTarget == null ? void 0 : eventTarget.setPointerCapture(e.pointerId);
const { clientX: x, clientY: y } = e;
updatePosStart(x, y);
updatePosEnd(x, y);
onSwipeStart == null ? void 0 : onSwipeStart(e);
}),
useEventListener(target, "pointermove", (e) => {
if (!filterEvent(e))
return;
if (!isPointerDown.value)
return;
const { clientX: x, clientY: y } = e;
updatePosEnd(x, y);
if (!isSwiping.value && isThresholdExceeded.value)
isSwiping.value = true;
if (isSwiping.value)
onSwipe == null ? void 0 : onSwipe(e);
}),
useEventListener(target, "pointerup", (e) => {
var _a, _b;
if (!filterEvent(e))
return;
if (isSwiping.value)
onSwipeEnd == null ? void 0 : onSwipeEnd(e, direction.value);
isPointerDown.value = false;
isSwiping.value = false;
(_b = (_a = targetRef.value) == null ? void 0 : _a.style) == null ? void 0 : _b.setProperty("touch-action", "initial");
})
];
const stop = () => stops.forEach((s) => s());
return {
isSwiping: vueDemi.readonly(isSwiping),
direction: vueDemi.readonly(direction),
posStart: vueDemi.readonly(posStart),
posEnd: vueDemi.readonly(posEnd),
distanceX,
distanceY,
stop
};
}
function usePreferredColorScheme(options) {
const isLight = useMediaQuery("(prefers-color-scheme: light)", options);
const isDark = useMediaQuery("(prefers-color-scheme: dark)", options);
return vueDemi.computed(() => {
if (isDark.value)
return "dark";
if (isLight.value)
return "light";
return "no-preference";
});
}
function usePreferredLanguages(options = {}) {
const { window = defaultWindow } = options;
if (!window)
return vueDemi.ref(["en"]);
const navigator = window.navigator;
const value = vueDemi.ref(navigator.languages);
useEventListener(window, "languagechange", () => {
value.value = navigator.languages;
});
return value;
}
const topVarName = "--vueuse-safe-area-top";
const rightVarName = "--vueuse-safe-area-right";
const bottomVarName = "--vueuse-safe-area-bottom";
const leftVarName = "--vueuse-safe-area-left";
function useScreenSafeArea() {
const top = vueDemi.ref("");
const right = vueDemi.ref("");
const bottom = vueDemi.ref("");
const left = vueDemi.ref("");
if (shared.isClient) {
const topCssVar = useCssVar(topVarName);
const rightCssVar = useCssVar(rightVarName);
const bottomCssVar = useCssVar(bottomVarName);
const leftCssVar = useCssVar(leftVarName);
topCssVar.value = "env(safe-area-inset-top, 0px)";
rightCssVar.value = "env(safe-area-inset-right, 0px)";
bottomCssVar.value = "env(safe-area-inset-bottom, 0px)";
leftCssVar.value = "env(safe-area-inset-left, 0px)";
update();
useEventListener("resize", shared.useDebounceFn(update));
}
function update() {
top.value = getValue(topVarName);
right.value = getValue(rightVarName);
bottom.value = getValue(bottomVarName);
left.value = getValue(leftVarName);
}
return {
top,
right,
bottom,
left,
update
};
}
function getValue(position) {
return getComputedStyle(document.documentElement).getPropertyValue(position);
}
function useScriptTag(src, onLoaded = shared.noop, options = {}) {
const {
immediate = true,
manual = false,
type = "text/javascript",
async = true,
crossOrigin,
referrerPolicy,
noModule,
defer,
document = defaultDocument,
attrs = {}
} = options;
const scriptTag = vueDemi.ref(null);
let _promise = null;
const loadScript = (waitForScriptLoad) => new Promise((resolve, reject) => {
const resolveWithElement = (el2) => {
scriptTag.value = el2;
resolve(el2);
return el2;
};
if (!document) {
resolve(false);
return;
}
let shouldAppend = false;
    let el = document.querySelector(`script[src="${vueDemi.unref(src)}"]`);
if (!el) {
el = document.createElement("script");
el.type = type;
el.async = async;
el.src = vueDemi.unref(src);
if (defer)
el.defer = defer;
if (crossOrigin)
el.crossOrigin = crossOrigin;
if (noModule)
el.noModule = noModule;
if (referrerPolicy)
el.referrerPolicy = referrerPolicy;
for (const attr in attrs)
el[attr] = attrs[attr];
shouldAppend = true;
} else if (el.hasAttribute("data-loaded")) {
resolveWithElement(el);
}
el.addEventListener("error", (event) => reject(event));
el.addEventListener("abort", (event) => reject(event));
el.addEventListener("load", () => {
el.setAttribute("data-loaded", "true");
onLoaded(el);
resolveWithElement(el);
});
if (shouldAppend)
el = document.head.appendChild(el);
if (!waitForScriptLoad)
resolveWithElement(el);
});
const load = (waitForScriptLoad = true) => {
if (!_promise)
_promise = loadScript(waitForScriptLoad);
return _promise;
};
const unload = () => {
if (!document)
return;
_promise = null;
if (scriptTag.value)
scriptTag.value = null;
    const el = document.querySelector(`script[src="${vueDemi.unref(src)}"]`);
if (el)
document.head.removeChild(el);
};
if (immediate && !manual)
shared.tryOnMounted(load);
if (!manual)
shared.tryOnUnmounted(unload);
return { scriptTag, load, unload };
}
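// Usage sketch (illustrative only; the URL is a placeholder):
//   const { load, unload } = useScriptTag(
//     "https://example.com/widget.js",
//     (el) => console.log("loaded", el.src),
//     { manual: true }
//   );
//   load();   // appends the <script> once; later calls reuse _promise
//   unload(); // removes the tag and clears the cached promise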
var _a, _b;
function preventDefault(rawEvent) {
const e = rawEvent || window.event;
if (e.touches.length > 1)
return true;
if (e.preventDefault)
e.preventDefault();
return false;
}
const isIOS = shared.isClient && (window == null ? void 0 : window.navigator) && ((_a = window == null ? void 0 : window.navigator) == null ? void 0 : _a.platform) && /iP(ad|hone|od)/.test((_b = window == null ? void 0 : window.navigator) == null ? void 0 : _b.platform);
function useScrollLock(element, initialState = false) {
const isLocked = vueDemi.ref(initialState);
let touchMoveListener = null;
let initialOverflow;
vueDemi.watch(() => vueDemi.unref(element), (el) => {
if (el) {
const ele = el;
initialOverflow = ele.style.overflow;
if (isLocked.value)
ele.style.overflow = "hidden";
}
}, {
immediate: true
});
const lock = () => {
const ele = vueDemi.unref(element);
if (!ele || isLocked.value)
return;
if (isIOS) {
touchMoveListener = useEventListener(document, "touchmove", preventDefault, { passive: false });
}
ele.style.overflow = "hidden";
isLocked.value = true;
};
const unlock = () => {
const ele = vueDemi.unref(element);
if (!ele || !isLocked.value)
return;
isIOS && (touchMoveListener == null ? void 0 : touchMoveListener());
ele.style.overflow = initialOverflow;
isLocked.value = false;
};
return vueDemi.computed({
get() {
return isLocked.value;
},
set(v) {
if (v)
lock();
else
unlock();
}
});
}
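// Usage sketch (illustrative only; `el` is a hypothetical scrollable element):
//   const isLocked = useScrollLock(el);
//   isLocked.value = true;  // sets overflow: hidden (plus a touchmove
//                           // preventDefault guard on iOS)
//   isLocked.value = false; // restores the captured initial overflow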
function useSessionStorage(key, initialValue, options = {}) {
const { window = defaultWindow } = options;
return useStorage(key, initialValue, window == null ? void 0 : window.sessionStorage, options);
}
var __defProp$3 = Object.defineProperty;
var __getOwnPropSymbols$3 = Object.getOwnPropertySymbols;
var __hasOwnProp$3 = Object.prototype.hasOwnProperty;
var __propIsEnum$3 = Object.prototype.propertyIsEnumerable;
var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues$3 = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp$3.call(b, prop))
__defNormalProp$3(a, prop, b[prop]);
if (__getOwnPropSymbols$3)
for (var prop of __getOwnPropSymbols$3(b)) {
if (__propIsEnum$3.call(b, prop))
__defNormalProp$3(a, prop, b[prop]);
}
return a;
};
function useShare(shareOptions = {}, options = {}) {
const { navigator = defaultNavigator } = options;
const _navigator = navigator;
const isSupported = _navigator && "canShare" in _navigator;
const share = async (overrideOptions = {}) => {
if (isSupported) {
const data = __spreadValues$3(__spreadValues$3({}, vueDemi.unref(shareOptions)), vueDemi.unref(overrideOptions));
let granted = true;
if (data.files && _navigator.canShare)
granted = _navigator.canShare({ files: data.files });
if (granted)
return _navigator.share(data);
}
};
return {
isSupported,
share
};
}
function useSpeechRecognition(options = {}) {
const {
interimResults = true,
continuous = true,
window = defaultWindow
} = options;
const lang = vueDemi.ref(options.lang || "en-US");
const isListening = vueDemi.ref(false);
const isFinal = vueDemi.ref(false);
const result = vueDemi.ref("");
const error = vueDemi.shallowRef(void 0);
const toggle = (value = !isListening.value) => {
isListening.value = value;
};
const start = () => {
isListening.value = true;
};
const stop = () => {
isListening.value = false;
};
const SpeechRecognition = window && (window.SpeechRecognition || window.webkitSpeechRecognition);
const isSupported = Boolean(SpeechRecognition);
let recognition;
if (isSupported) {
recognition = new SpeechRecognition();
recognition.continuous = continuous;
recognition.interimResults = interimResults;
recognition.lang = vueDemi.unref(lang);
recognition.onstart = () => {
isFinal.value = false;
};
vueDemi.watch(lang, (lang2) => {
if (recognition && !isListening.value)
recognition.lang = lang2;
});
recognition.onresult = (event) => {
const transcript = Array.from(event.results).map((result2) => {
isFinal.value = result2.isFinal;
return result2[0];
}).map((result2) => result2.transcript).join("");
result.value = transcript;
error.value = void 0;
};
recognition.onerror = (event) => {
error.value = event;
};
recognition.onend = () => {
isListening.value = false;
recognition.lang = vueDemi.unref(lang);
};
vueDemi.watch(isListening, () => {
if (isListening.value)
recognition.start();
else
recognition.stop();
});
}
shared.tryOnScopeDispose(() => {
isListening.value = false;
});
return {
isSupported,
isListening,
isFinal,
recognition,
result,
error,
toggle,
start,
stop
};
}
function useSpeechSynthesis(text, options = {}) {
var _a, _b;
const {
pitch = 1,
rate = 1,
volume = 1,
window = defaultWindow
} = options;
const synth = window && window.speechSynthesis;
const isSupported = Boolean(synth);
const isPlaying = vueDemi.ref(false);
const status = vueDemi.ref("init");
const voiceInfo = {
lang: ((_a = options.voice) == null ? void 0 : _a.lang) || "default",
name: ((_b = options.voice) == null ? void 0 : _b.name) || ""
};
const spokenText = vueDemi.ref(text || "");
const lang = vueDemi.ref(options.lang || "en-US");
const error = vueDemi.shallowRef(void 0);
const toggle = (value = !isPlaying.value) => {
isPlaying.value = value;
};
const bindEventsForUtterance = (utterance2) => {
utterance2.lang = vueDemi.unref(lang);
options.voice && (utterance2.voice = options.voice);
utterance2.pitch = pitch;
utterance2.rate = rate;
utterance2.volume = volume;
utterance2.onstart = () => {
isPlaying.value = true;
status.value = "play";
};
utterance2.onpause = () => {
isPlaying.value = false;
status.value = "pause";
};
utterance2.onresume = () => {
isPlaying.value = true;
status.value = "play";
};
    utterance2.onend = () => {
      isPlaying.value = false;
      status.value = "end";
      utterance2.lang = vueDemi.unref(lang);
    };
    utterance2.onerror = (event) => {
      error.value = event;
    };
};
const utterance = vueDemi.computed(() => {
isPlaying.value = false;
status.value = "init";
const newUtterance = new SpeechSynthesisUtterance(spokenText.value);
bindEventsForUtterance(newUtterance);
return newUtterance;
});
const speak = () => {
synth.cancel();
utterance && synth.speak(utterance.value);
};
if (isSupported) {
bindEventsForUtterance(utterance.value);
vueDemi.watch(lang, (lang2) => {
if (utterance.value && !isPlaying.value)
utterance.value.lang = lang2;
});
vueDemi.watch(isPlaying, () => {
if (isPlaying.value)
synth.resume();
else
synth.pause();
});
}
shared.tryOnScopeDispose(() => {
isPlaying.value = false;
});
return {
isSupported,
isPlaying,
status,
voiceInfo,
utterance,
error,
toggle,
speak
};
}
function useStorageAsync(key, initialValue, storage, options = {}) {
var _a;
const {
flush = "pre",
deep = true,
listenToStorageChanges = true,
writeDefaults = true,
shallow,
window = defaultWindow,
eventFilter,
onError = (e) => {
console.error(e);
}
} = options;
const rawInit = vueDemi.unref(initialValue);
const type = guessSerializerType(rawInit);
const data = (shallow ? vueDemi.shallowRef : vueDemi.ref)(initialValue);
const serializer = (_a = options.serializer) != null ? _a : StorageSerializers[type];
if (!storage) {
try {
storage = getSSRHandler("getDefaultStorage", () => {
var _a2;
return (_a2 = defaultWindow) == null ? void 0 : _a2.localStorage;
})();
} catch (e) {
onError(e);
}
}
async function read(event) {
if (!storage || event && event.key !== key)
return;
try {
const rawValue = event ? event.newValue : await storage.getItem(key);
if (rawValue == null) {
data.value = rawInit;
if (writeDefaults && rawInit !== null)
await storage.setItem(key, await serializer.write(rawInit));
} else {
data.value = await serializer.read(rawValue);
}
} catch (e) {
onError(e);
}
}
read();
if (window && listenToStorageChanges)
useEventListener(window, "storage", (e) => setTimeout(() => read(e), 0));
if (storage) {
shared.watchWithFilter(data, async () => {
try {
if (data.value == null)
await storage.removeItem(key);
else
await storage.setItem(key, await serializer.write(data.value));
} catch (e) {
onError(e);
}
}, {
flush,
deep,
eventFilter
});
}
return data;
}
let _id = 0;
function useStyleTag(css, options = {}) {
const isLoaded = vueDemi.ref(false);
const {
document = defaultDocument,
immediate = true,
manual = false,
id = `vueuse_styletag_${++_id}`
} = options;
const cssRef = vueDemi.ref(css);
let stop = () => {
};
const load = () => {
if (!document)
return;
const el = document.getElementById(id) || document.createElement("style");
el.type = "text/css";
el.id = id;
if (options.media)
el.media = options.media;
document.head.appendChild(el);
if (isLoaded.value)
return;
stop = vueDemi.watch(cssRef, (value) => {
el.innerText = value;
}, { immediate: true });
isLoaded.value = true;
};
const unload = () => {
if (!document || !isLoaded.value)
return;
stop();
document.head.removeChild(document.getElementById(id));
isLoaded.value = false;
};
if (immediate && !manual)
load();
if (!manual)
shared.tryOnScopeDispose(unload);
return {
id,
css: cssRef,
unload,
load,
isLoaded: vueDemi.readonly(isLoaded)
};
}
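// Usage sketch (illustrative only):
//   const { css, unload } = useStyleTag("body { background: #222 }");
//   css.value = "body { background: #eee }"; // watcher rewrites el.innerText
//   unload(); // removes the generated <style id="vueuse_styletag_N"> element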
function useTemplateRefsList() {
const refs = vueDemi.ref([]);
refs.value.set = (el) => {
if (el)
refs.value.push(el);
};
vueDemi.onBeforeUpdate(() => {
refs.value.length = 0;
});
return refs;
}
function getRangesFromSelection(selection) {
var _a;
const rangeCount = (_a = selection.rangeCount) != null ? _a : 0;
const ranges = new Array(rangeCount);
for (let i = 0; i < rangeCount; i++) {
const range = selection.getRangeAt(i);
ranges[i] = range;
}
return ranges;
}
function useTextSelection(options = {}) {
const {
window = defaultWindow
} = options;
const selection = vueDemi.ref(null);
const text = vueDemi.computed(() => {
var _a, _b;
return (_b = (_a = selection.value) == null ? void 0 : _a.toString()) != null ? _b : "";
});
const ranges = vueDemi.computed(() => selection.value ? getRangesFromSelection(selection.value) : []);
const rects = vueDemi.computed(() => ranges.value.map((range) => range.getBoundingClientRect()));
function onSelectionChange() {
selection.value = null;
if (window)
selection.value = window.getSelection();
}
if (window)
useEventListener(window.document, "selectionchange", onSelectionChange);
return {
text,
rects,
ranges,
selection
};
}
var __defProp$2 = Object.defineProperty;
var __defProps = Object.defineProperties;
var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
var __getOwnPropSymbols$2 = Object.getOwnPropertySymbols;
var __hasOwnProp$2 = Object.prototype.hasOwnProperty;
var __propIsEnum$2 = Object.prototype.propertyIsEnumerable;
var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues$2 = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp$2.call(b, prop))
__defNormalProp$2(a, prop, b[prop]);
if (__getOwnPropSymbols$2)
for (var prop of __getOwnPropSymbols$2(b)) {
if (__propIsEnum$2.call(b, prop))
__defNormalProp$2(a, prop, b[prop]);
}
return a;
};
var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
function useThrottledRefHistory(source, options = {}) {
const { throttle = 200, trailing = true } = options;
const filter = shared.throttleFilter(throttle, trailing);
const history = useRefHistory(source, __spreadProps(__spreadValues$2({}, options), { eventFilter: filter }));
return __spreadValues$2({}, history);
}
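// Usage sketch (illustrative only): same API as useRefHistory, but commits
// are rate-limited through throttleFilter.
//   const counter = ref(0);
//   const { history, undo, redo } = useThrottledRefHistory(counter, {
//     throttle: 500,
//     trailing: true
//   });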
var __defProp$1 = Object.defineProperty;
var __getOwnPropSymbols$1 = Object.getOwnPropertySymbols;
var __hasOwnProp$1 = Object.prototype.hasOwnProperty;
var __propIsEnum$1 = Object.prototype.propertyIsEnumerable;
var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues$1 = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp$1.call(b, prop))
__defNormalProp$1(a, prop, b[prop]);
if (__getOwnPropSymbols$1)
for (var prop of __getOwnPropSymbols$1(b)) {
if (__propIsEnum$1.call(b, prop))
__defNormalProp$1(a, prop, b[prop]);
}
return a;
};
var __objRest = (source, exclude) => {
var target = {};
for (var prop in source)
if (__hasOwnProp$1.call(source, prop) && exclude.indexOf(prop) < 0)
target[prop] = source[prop];
if (source != null && __getOwnPropSymbols$1)
for (var prop of __getOwnPropSymbols$1(source)) {
if (exclude.indexOf(prop) < 0 && __propIsEnum$1.call(source, prop))
target[prop] = source[prop];
}
return target;
};
const UNITS = [
{ max: 6e4, value: 1e3, name: "second" },
{ max: 276e4, value: 6e4, name: "minute" },
{ max: 72e6, value: 36e5, name: "hour" },
{ max: 5184e5, value: 864e5, name: "day" },
{ max: 24192e5, value: 6048e5, name: "week" },
{ max: 28512e6, value: 2592e6, name: "month" },
{ max: Infinity, value: 31536e6, name: "year" }
];
const DEFAULT_MESSAGES = {
justNow: "just now",
past: (n) => n.match(/\d/) ? `${n} ago` : n,
future: (n) => n.match(/\d/) ? `in ${n}` : n,
month: (n, past) => n === 1 ? past ? "last month" : "next month" : `${n} month${n > 1 ? "s" : ""}`,
year: (n, past) => n === 1 ? past ? "last year" : "next year" : `${n} year${n > 1 ? "s" : ""}`,
day: (n, past) => n === 1 ? past ? "yesterday" : "tomorrow" : `${n} day${n > 1 ? "s" : ""}`,
week: (n, past) => n === 1 ? past ? "last week" : "next week" : `${n} week${n > 1 ? "s" : ""}`,
hour: (n) => `${n} hour${n > 1 ? "s" : ""}`,
minute: (n) => `${n} minute${n > 1 ? "s" : ""}`,
second: (n) => `${n} second${n > 1 ? "s" : ""}`
};
const DEFAULT_FORMATTER = (date) => date.toISOString().slice(0, 10);
function useTimeAgo(time, options = {}) {
const {
controls: exposeControls = false,
max,
updateInterval = 3e4,
messages = DEFAULT_MESSAGES,
fullDateFormatter = DEFAULT_FORMATTER
} = options;
const { abs, round } = Math;
const _a = useNow({ interval: updateInterval, controls: true }), { now } = _a, controls = __objRest(_a, ["now"]);
function getTimeago(from, now2) {
var _a2;
const diff = +now2 - +from;
const absDiff = abs(diff);
if (absDiff < 6e4)
return messages.justNow;
if (typeof max === "number" && absDiff > max)
return fullDateFormatter(new Date(from));
if (typeof max === "string") {
const unitMax = (_a2 = UNITS.find((i) => i.name === max)) == null ? void 0 : _a2.max;
if (unitMax && absDiff > unitMax)
return fullDateFormatter(new Date(from));
}
for (const unit of UNITS) {
if (absDiff < unit.max)
return format(diff, unit);
}
}
function applyFormat(name, val, isPast) {
const formatter = messages[name];
if (typeof formatter === "function")
return formatter(val, isPast);
return formatter.replace("{0}", val.toString());
}
function format(diff, unit) {
const val = round(abs(diff) / unit.value);
const past = diff > 0;
const str = applyFormat(unit.name, val, past);
return applyFormat(past ? "past" : "future", str, past);
}
const timeAgo = vueDemi.computed(() => getTimeago(new Date(vueDemi.unref(time)), vueDemi.unref(now.value)));
if (exposeControls) {
return __spreadValues$1({
timeAgo
}, controls);
} else {
return timeAgo;
}
}
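// Usage sketch (illustrative only):
//   const timeAgo = useTimeAgo(Date.now() - 2 * 60 * 1000);
//   // -> "2 minutes ago": 120000ms is past the 6e4ms "just now" cutoff,
//   //    falls under the minute unit, and round(120000 / 6e4) = 2.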
function useTimeoutPoll(fn, interval, timeoutPollOptions) {
const { start } = shared.useTimeoutFn(loop, interval);
const isActive = vueDemi.ref(false);
async function loop() {
if (!isActive.value)
return;
await fn();
start();
}
function resume() {
if (!isActive.value) {
isActive.value = true;
loop();
}
}
function pause() {
isActive.value = false;
}
if (timeoutPollOptions == null ? void 0 : timeoutPollOptions.immediate)
resume();
shared.tryOnScopeDispose(pause);
return {
isActive,
pause,
resume
};
}
var __defProp = Object.defineProperty;
var __getOwnPropSymbols = Object.getOwnPropertySymbols;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __propIsEnum = Object.prototype.propertyIsEnumerable;
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp.call(b, prop))
__defNormalProp(a, prop, b[prop]);
if (__getOwnPropSymbols)
for (var prop of __getOwnPropSymbols(b)) {
if (__propIsEnum.call(b, prop))
__defNormalProp(a, prop, b[prop]);
}
return a;
};
function useTimestamp(options = {}) {
const {
controls: exposeControls = false,
offset = 0,
immediate = true,
interval = "requestAnimationFrame"
} = options;
const ts = vueDemi.ref(shared.timestamp() + offset);
const update = () => ts.value = shared.timestamp() + offset;
const controls = interval === "requestAnimationFrame" ? useRafFn(update, { immediate }) : shared.useIntervalFn(update, interval, { immediate });
if (exposeControls) {
return __spreadValues({
timestamp: ts
}, controls);
} else {
return ts;
}
}
function useTitle(newTitle = null, options = {}) {
var _a, _b;
const {
document = defaultDocument,
observe = false,
titleTemplate = "%s"
} = options;
const title = vueDemi.ref((_a = newTitle != null ? newTitle : document == null ? void 0 : document.title) != null ? _a : null);
vueDemi.watch(title, (t, o) => {
if (shared.isString(t) && t !== o && document)
document.title = titleTemplate.replace("%s", t);
}, { immediate: true });
if (observe && document) {
useMutationObserver((_b = document.head) == null ? void 0 : _b.querySelector("title"), () => {
if (document && document.title !== title.value)
title.value = titleTemplate.replace("%s", document.title);
}, { childList: true });
}
return title;
}
const TransitionPresets = {
linear: shared.identity,
easeInSine: [0.12, 0, 0.39, 0],
easeOutSine: [0.61, 1, 0.88, 1],
easeInOutSine: [0.37, 0, 0.63, 1],
easeInQuad: [0.11, 0, 0.5, 0],
easeOutQuad: [0.5, 1, 0.89, 1],
easeInOutQuad: [0.45, 0, 0.55, 1],
easeInCubic: [0.32, 0, 0.67, 0],
easeOutCubic: [0.33, 1, 0.68, 1],
easeInOutCubic: [0.65, 0, 0.35, 1],
easeInQuart: [0.5, 0, 0.75, 0],
easeOutQuart: [0.25, 1, 0.5, 1],
easeInOutQuart: [0.76, 0, 0.24, 1],
easeInQuint: [0.64, 0, 0.78, 0],
easeOutQuint: [0.22, 1, 0.36, 1],
easeInOutQuint: [0.83, 0, 0.17, 1],
easeInExpo: [0.7, 0, 0.84, 0],
easeOutExpo: [0.16, 1, 0.3, 1],
easeInOutExpo: [0.87, 0, 0.13, 1],
easeInCirc: [0.55, 0, 1, 0.45],
easeOutCirc: [0, 0.55, 0.45, 1],
easeInOutCirc: [0.85, 0, 0.15, 1],
easeInBack: [0.36, 0, 0.66, -0.56],
easeOutBack: [0.34, 1.56, 0.64, 1],
easeInOutBack: [0.68, -0.6, 0.32, 1.6]
};
function createEasingFunction([p0, p1, p2, p3]) {
const a = (a1, a2) => 1 - 3 * a2 + 3 * a1;
const b = (a1, a2) => 3 * a2 - 6 * a1;
const c = (a1) => 3 * a1;
const calcBezier = (t, a1, a2) => ((a(a1, a2) * t + b(a1, a2)) * t + c(a1)) * t;
const getSlope = (t, a1, a2) => 3 * a(a1, a2) * t * t + 2 * b(a1, a2) * t + c(a1);
const getTforX = (x) => {
let aGuessT = x;
for (let i = 0; i < 4; ++i) {
const currentSlope = getSlope(aGuessT, p0, p2);
if (currentSlope === 0)
return aGuessT;
const currentX = calcBezier(aGuessT, p0, p2) - x;
aGuessT -= currentX / currentSlope;
}
return aGuessT;
};
return (x) => p0 === p1 && p2 === p3 ? x : calcBezier(getTforX(x), p1, p3);
}
function useTransition(source, options = {}) {
const {
delay = 0,
disabled = false,
duration = 1e3,
onFinished = shared.noop,
onStarted = shared.noop,
transition = shared.identity
} = options;
const currentTransition = vueDemi.computed(() => {
const t = vueDemi.unref(transition);
return shared.isFunction(t) ? t : createEasingFunction(t);
});
const sourceValue = vueDemi.computed(() => {
const s = vueDemi.unref(source);
return shared.isNumber(s) ? s : s.map(vueDemi.unref);
});
const sourceVector = vueDemi.computed(() => shared.isNumber(sourceValue.value) ? [sourceValue.value] : sourceValue.value);
const outputVector = vueDemi.ref(sourceVector.value.slice(0));
let currentDuration;
let diffVector;
let endAt;
let startAt;
let startVector;
const { resume, pause } = useRafFn(() => {
const now = Date.now();
const progress = shared.clamp(1 - (endAt - now) / currentDuration, 0, 1);
outputVector.value = startVector.map((val, i) => {
var _a;
return val + ((_a = diffVector[i]) != null ? _a : 0) * currentTransition.value(progress);
});
if (progress >= 1) {
pause();
onFinished();
}
}, { immediate: false });
const start = () => {
pause();
currentDuration = vueDemi.unref(duration);
diffVector = outputVector.value.map((n, i) => {
var _a, _b;
return ((_a = sourceVector.value[i]) != null ? _a : 0) - ((_b = outputVector.value[i]) != null ? _b : 0);
});
startVector = outputVector.value.slice(0);
startAt = Date.now();
endAt = startAt + currentDuration;
resume();
onStarted();
};
const timeout = shared.useTimeoutFn(start, delay, { immediate: false });
vueDemi.watch(sourceVector, () => {
if (vueDemi.unref(disabled)) {
outputVector.value = sourceVector.value.slice(0);
} else {
if (vueDemi.unref(delay) <= 0)
start();
else
timeout.start();
}
}, { deep: true });
return vueDemi.computed(() => {
const targetVector = vueDemi.unref(disabled) ? sourceVector : outputVector;
return shared.isNumber(sourceValue.value) ? targetVector.value[0] : targetVector.value;
});
}
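// Usage sketch (illustrative only):
//   const source = ref(0);
//   const eased = useTransition(source, {
//     duration: 500,
//     transition: TransitionPresets.easeOutCubic
//   });
//   source.value = 100; // `eased` animates 0 -> 100 over 500ms via useRafFn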
function useUrlSearchParams(mode = "history", options = {}) {
const {
initialValue = {},
removeNullishValues = true,
removeFalsyValues = false,
window = defaultWindow
} = options;
if (!window)
return vueDemi.reactive(initialValue);
const state = vueDemi.reactive(initialValue);
function getRawParams() {
if (mode === "history") {
return window.location.search || "";
} else if (mode === "hash") {
const hash = window.location.hash || "";
const index = hash.indexOf("?");
return index > 0 ? hash.slice(index) : "";
} else {
return (window.location.hash || "").replace(/^#/, "");
}
}
function constructQuery(params) {
const stringified = params.toString();
if (mode === "history")
return `${stringified ? `?${stringified}` : ""}${location.hash || ""}`;
if (mode === "hash-params")
return `${location.search || ""}${stringified ? `#${stringified}` : ""}`;
const hash = window.location.hash || "#";
const index = hash.indexOf("?");
if (index > 0)
return `${hash.slice(0, index)}${stringified ? `?${stringified}` : ""}`;
return `${hash}${stringified ? `?${stringified}` : ""}`;
}
function read() {
return new URLSearchParams(getRawParams());
}
function updateState(params) {
const unusedKeys = new Set(Object.keys(state));
for (const key of params.keys()) {
const paramsForKey = params.getAll(key);
state[key] = paramsForKey.length > 1 ? paramsForKey : params.get(key) || "";
unusedKeys.delete(key);
}
Array.from(unusedKeys).forEach((key) => delete state[key]);
}
const { pause, resume } = shared.pausableWatch(state, () => {
const params = new URLSearchParams("");
Object.keys(state).forEach((key) => {
const mapEntry = state[key];
if (Array.isArray(mapEntry))
mapEntry.forEach((value) => params.append(key, value));
else if (removeNullishValues && mapEntry == null)
params.delete(key);
else if (removeFalsyValues && !mapEntry)
params.delete(key);
else
params.set(key, mapEntry);
});
write(params);
}, { deep: true });
function write(params, shouldUpdate) {
pause();
if (shouldUpdate)
updateState(params);
window.history.replaceState({}, "", window.location.pathname + constructQuery(params));
resume();
}
function onChanged() {
write(read(), true);
}
useEventListener(window, "popstate", onChanged, false);
if (mode !== "history")
useEventListener(window, "hashchange", onChanged, false);
updateState(read());
return state;
}
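// Usage sketch (illustrative only):
//   const params = useUrlSearchParams("history");
//   params.page = "2"; // replaceState writes "?page=2", preserving the hash
//   // "hash" mode keeps params after a "?" inside the hash ("#/path?page=2");
//   // "hash-params" mode stores them directly in the hash ("#page=2").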
function useUserMedia(options = {}) {
var _a, _b, _c;
const enabled = vueDemi.ref((_a = options.enabled) != null ? _a : false);
const autoSwitch = vueDemi.ref((_b = options.autoSwitch) != null ? _b : true);
const videoDeviceId = vueDemi.ref(options.videoDeviceId);
const audioDeviceId = vueDemi.ref(options.audioDeviceId);
const { navigator = defaultNavigator } = options;
const isSupported = Boolean((_c = navigator == null ? void 0 : navigator.mediaDevices) == null ? void 0 : _c.getUserMedia);
const stream = vueDemi.shallowRef();
function getDeviceOptions(device) {
if (device.value === "none" || device.value === false)
return false;
if (device.value == null)
return true;
return {
deviceId: device.value
};
}
async function _start() {
if (!isSupported || stream.value)
return;
stream.value = await navigator.mediaDevices.getUserMedia({
video: getDeviceOptions(videoDeviceId),
audio: getDeviceOptions(audioDeviceId)
});
return stream.value;
}
async function _stop() {
var _a2;
(_a2 = stream.value) == null ? void 0 : _a2.getTracks().forEach((t) => t.stop());
stream.value = void 0;
}
function stop() {
_stop();
enabled.value = false;
}
async function start() {
await _start();
if (stream.value)
enabled.value = true;
return stream.value;
}
async function restart() {
_stop();
return await start();
}
vueDemi.watch(enabled, (v) => {
if (v)
_start();
else
_stop();
}, { immediate: true });
vueDemi.watch([videoDeviceId, audioDeviceId], () => {
if (autoSwitch.value && stream.value)
restart();
}, { immediate: true });
return {
isSupported,
stream,
start,
stop,
restart,
videoDeviceId,
audioDeviceId,
enabled,
autoSwitch
};
}
function useVModel(props, key, emit, options = {}) {
var _a, _b, _c;
const {
passive = false,
eventName,
deep = false
} = options;
const vm = vueDemi.getCurrentInstance();
const _emit = emit || (vm == null ? void 0 : vm.emit) || ((_a = vm == null ? void 0 : vm.$emit) == null ? void 0 : _a.bind(vm));
let event = eventName;
if (!key) {
if (vueDemi.isVue2) {
const modelOptions = (_c = (_b = vm == null ? void 0 : vm.proxy) == null ? void 0 : _b.$options) == null ? void 0 : _c.model;
key = (modelOptions == null ? void 0 : modelOptions.value) || "value";
if (!eventName)
event = (modelOptions == null ? void 0 : modelOptions.event) || "input";
} else {
key = "modelValue";
}
}
event = eventName || event || `update:${key}`;
if (passive) {
const proxy = vueDemi.ref(props[key]);
vueDemi.watch(() => props[key], (v) => proxy.value = v);
vueDemi.watch(proxy, (v) => {
if (v !== props[key] || deep)
_emit(event, v);
}, {
deep
});
return proxy;
} else {
return vueDemi.computed({
get() {
return props[key];
},
set(value) {
_emit(event, value);
}
});
}
}
function useVModels(props, emit, options = {}) {
const ret = {};
for (const key in props)
ret[key] = useVModel(props, key, emit, options);
return ret;
}
function useVibrate(options) {
const {
pattern = [],
interval = 0,
navigator = defaultNavigator
} = options || {};
const isSupported = typeof navigator !== "undefined" && "vibrate" in navigator;
const patternRef = vueDemi.ref(pattern);
let intervalControls;
const vibrate = (pattern2 = patternRef.value) => {
if (isSupported)
navigator.vibrate(pattern2);
};
const stop = () => {
if (isSupported)
navigator.vibrate(0);
intervalControls == null ? void 0 : intervalControls.pause();
};
if (interval > 0) {
intervalControls = shared.useIntervalFn(vibrate, interval, {
immediate: false,
immediateCallback: false
});
}
return {
isSupported,
pattern,
intervalControls,
vibrate,
stop
};
}
function useVirtualList(list, options) {
const containerRef = vueDemi.ref();
const size = useElementSize(containerRef);
const currentList = vueDemi.ref([]);
const source = vueDemi.shallowRef(list);
const state = vueDemi.ref({ start: 0, end: 10 });
const { itemHeight, overscan = 5 } = options;
const getViewCapacity = (containerHeight) => {
if (typeof itemHeight === "number")
return Math.ceil(containerHeight / itemHeight);
const { start = 0 } = state.value;
let sum = 0;
let capacity = 0;
for (let i = start; i < source.value.length; i++) {
const height = itemHeight(i);
sum += height;
if (sum >= containerHeight) {
capacity = i;
break;
}
}
return capacity - start;
};
const getOffset = (scrollTop) => {
if (typeof itemHeight === "number")
return Math.floor(scrollTop / itemHeight) + 1;
let sum = 0;
let offset = 0;
for (let i = 0; i < source.value.length; i++) {
const height = itemHeight(i);
sum += height;
if (sum >= scrollTop) {
offset = i;
break;
}
}
return offset + 1;
};
const calculateRange = () => {
const element = containerRef.value;
if (element) {
const offset = getOffset(element.scrollTop);
const viewCapacity = getViewCapacity(element.clientHeight);
const from = offset - overscan;
const to = offset + viewCapacity + overscan;
state.value = {
start: from < 0 ? 0 : from,
end: to > source.value.length ? source.value.length : to
};
currentList.value = source.value.slice(state.value.start, state.value.end).map((ele, index) => ({
data: ele,
index: index + state.value.start
}));
}
};
vueDemi.watch([size.width, size.height, list], () => {
calculateRange();
});
const totalHeight = vueDemi.computed(() => {
if (typeof itemHeight === "number")
return source.value.length * itemHeight;
return source.value.reduce((sum, _, index) => sum + itemHeight(index), 0);
});
const getDistanceTop = (index) => {
if (typeof itemHeight === "number") {
const height2 = index * itemHeight;
return height2;
}
const height = source.value.slice(0, index).reduce((sum, _, i) => sum + itemHeight(i), 0);
return height;
};
const scrollTo = (index) => {
if (containerRef.value) {
containerRef.value.scrollTop = getDistanceTop(index);
calculateRange();
}
};
const offsetTop = vueDemi.computed(() => getDistanceTop(state.value.start));
const wrapperProps = vueDemi.computed(() => {
return {
style: {
width: "100%",
height: `${totalHeight.value - offsetTop.value}px`,
marginTop: `${offsetTop.value}px`
}
};
});
const containerStyle = { overflowY: "auto" };
return {
list: currentList,
scrollTo,
containerProps: {
ref: containerRef,
onScroll: () => {
calculateRange();
},
style: containerStyle
},
wrapperProps
};
}
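// Usage sketch: bind `containerProps` to the scrolling element and `wrapperProps` to the
// inner spacer; each entry of the returned `list` has the shape { data, index }.
//   const { list, containerProps, wrapperProps } = useVirtualList(rows, { itemHeight: 22, overscan: 10 });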
const useWakeLock = (options = {}) => {
const {
navigator = defaultNavigator,
document = defaultDocument
} = options;
let wakeLock;
const isSupported = navigator && "wakeLock" in navigator;
const isActive = vueDemi.ref(false);
async function onVisibilityChange() {
if (!isSupported || !wakeLock)
return;
if (document && document.visibilityState === "visible")
wakeLock = await navigator.wakeLock.request("screen");
isActive.value = !wakeLock.released;
}
if (document)
useEventListener(document, "visibilitychange", onVisibilityChange, { passive: true });
async function request(type) {
if (!isSupported)
return;
wakeLock = await navigator.wakeLock.request(type);
isActive.value = !wakeLock.released;
}
async function release() {
if (!isSupported || !wakeLock)
return;
await wakeLock.release();
isActive.value = !wakeLock.released;
wakeLock = null;
}
return {
isSupported,
isActive,
request,
release
};
};
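// Usage sketch ("screen" is the lock type the code above requests on visibility changes):
//   const { isSupported, isActive, request, release } = useWakeLock();
//   await request("screen"); // isActive becomes true until release() or page hide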
const useWebNotification = (defaultOptions = {}) => {
const {
window = defaultWindow
} = defaultOptions;
const isSupported = !!window && "Notification" in window;
const notification = vueDemi.ref(null);
const requestPermission = async () => {
if (!isSupported)
return;
if ("permission" in Notification && Notification.permission !== "denied")
await Notification.requestPermission();
};
const onClick = shared.createEventHook();
const onShow = shared.createEventHook();
const onError = shared.createEventHook();
const onClose = shared.createEventHook();
const show = async (overrides) => {
if (!isSupported)
return;
await requestPermission();
const options = Object.assign({}, defaultOptions, overrides);
notification.value = new Notification(options.title || "", options);
notification.value.onclick = (event) => onClick.trigger(event);
notification.value.onshow = (event) => onShow.trigger(event);
notification.value.onerror = (event) => onError.trigger(event);
notification.value.onclose = (event) => onClose.trigger(event);
return notification.value;
};
const close = () => {
if (notification.value)
notification.value.close();
notification.value = null;
};
shared.tryOnMounted(async () => {
if (isSupported)
await requestPermission();
});
shared.tryOnScopeDispose(close);
if (isSupported && window) {
const document = window.document;
useEventListener(document, "visibilitychange", (e) => {
e.preventDefault();
if (document.visibilityState === "visible") {
close();
}
});
}
return {
isSupported,
notification,
show,
close,
onClick,
onShow,
onError,
onClose
};
};
function resolveNestedOptions(options) {
if (options === true)
return {};
return options;
}
function useWebSocket(url, options = {}) {
const {
onConnected,
onDisconnected,
onError,
onMessage,
immediate = true,
autoClose = true,
protocols = []
} = options;
const data = vueDemi.ref(null);
const status = vueDemi.ref("CONNECTING");
const wsRef = vueDemi.ref();
let heartbeatPause;
let heartbeatResume;
let explicitlyClosed = false;
let retried = 0;
let bufferedData = [];
const close = (code = 1e3, reason) => {
if (!wsRef.value)
return;
explicitlyClosed = true;
heartbeatPause == null ? void 0 : heartbeatPause();
wsRef.value.close(code, reason);
};
const _sendBuffer = () => {
if (bufferedData.length && wsRef.value && status.value === "OPEN") {
for (const buffer of bufferedData)
wsRef.value.send(buffer);
bufferedData = [];
}
};
const send = (data2, useBuffer = true) => {
if (!wsRef.value || status.value !== "OPEN") {
if (useBuffer)
bufferedData.push(data2);
return false;
}
_sendBuffer();
wsRef.value.send(data2);
return true;
};
const _init = () => {
const ws = new WebSocket(url, protocols);
wsRef.value = ws;
status.value = "CONNECTING";
explicitlyClosed = false;
ws.onopen = () => {
status.value = "OPEN";
onConnected == null ? void 0 : onConnected(ws);
heartbeatResume == null ? void 0 : heartbeatResume();
_sendBuffer();
};
ws.onclose = (ev) => {
status.value = "CLOSED";
wsRef.value = void 0;
onDisconnected == null ? void 0 : onDisconnected(ws, ev);
if (!explicitlyClosed && options.autoReconnect) {
const {
retries = -1,
delay = 1e3,
onFailed
} = resolveNestedOptions(options.autoReconnect);
retried += 1;
if (typeof retries === "number" && (retries < 0 || retried < retries))
setTimeout(_init, delay);
else if (typeof retries === "function" && retries())
setTimeout(_init, delay);
else
onFailed == null ? void 0 : onFailed();
}
};
ws.onerror = (e) => {
onError == null ? void 0 : onError(ws, e);
};
ws.onmessage = (e) => {
data.value = e.data;
onMessage == null ? void 0 : onMessage(ws, e);
};
};
if (options.heartbeat) {
const {
message = "ping",
interval = 1e3
} = resolveNestedOptions(options.heartbeat);
const { pause, resume } = shared.useIntervalFn(() => send(message, false), interval, { immediate: false });
heartbeatPause = pause;
heartbeatResume = resume;
}
if (immediate)
_init();
if (autoClose) {
useEventListener(window, "beforeunload", () => close());
shared.tryOnScopeDispose(close);
}
const open = () => {
close();
retried = 0;
_init();
};
return {
data,
status,
close,
send,
open,
ws: wsRef
};
}
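// Usage sketch (option names taken from the destructuring above; the URL is illustrative):
//   const { status, data, send } = useWebSocket("wss://echo.example.org", {
//     heartbeat: { message: "ping", interval: 1000 },
//     autoReconnect: { retries: 3, delay: 1000, onFailed: () => console.warn("gave up") },
//   });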
function useWebWorker(url, workerOptions, options = {}) {
const {
window = defaultWindow
} = options;
const data = vueDemi.ref(null);
const worker = vueDemi.shallowRef();
const post = function post2(val) {
if (!worker.value)
return;
worker.value.postMessage(val);
};
const terminate = function terminate2() {
if (!worker.value)
return;
worker.value.terminate();
};
if (window) {
worker.value = new Worker(url, workerOptions);
worker.value.onmessage = (e) => {
data.value = e.data;
};
shared.tryOnScopeDispose(() => {
if (worker.value)
worker.value.terminate();
});
}
return {
data,
post,
terminate,
worker
};
}
const jobRunner = (userFunc) => (e) => {
const userFuncArgs = e.data[0];
return Promise.resolve(userFunc.apply(void 0, userFuncArgs)).then((result) => {
postMessage(["SUCCESS", result]);
}).catch((error) => {
postMessage(["ERROR", error]);
});
};
const depsParser = (deps) => {
if (deps.length === 0)
return "";
const depsString = deps.map((dep) => `'${dep}'`).toString();
return `importScripts(${depsString})`;
};
const createWorkerBlobUrl = (fn, deps) => {
const blobCode = `${depsParser(deps)}; onmessage=(${jobRunner})(${fn})`;
const blob = new Blob([blobCode], { type: "text/javascript" });
const url = URL.createObjectURL(blob);
return url;
};
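// Illustration (hypothetical inputs): createWorkerBlobUrl inlines a function and its
// remote deps into a one-off worker script, so
//   createWorkerBlobUrl((a, b) => a + b, ["https://example.com/lib.js"])
// yields a blob URL whose script is roughly:
//   importScripts('https://example.com/lib.js'); onmessage=(jobRunner)((a, b) => a + b)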
const useWebWorkerFn = (fn, options = {}) => {
const {
dependencies = [],
timeout,
window = defaultWindow
} = options;
const worker = vueDemi.ref();
const workerStatus = vueDemi.ref("PENDING");
const promise = vueDemi.ref({});
const timeoutId = vueDemi.ref();
const workerTerminate = (status = "PENDING") => {
if (worker.value && worker.value._url && window) {
worker.value.terminate();
URL.revokeObjectURL(worker.value._url);
promise.value = {};
worker.value = void 0;
window.clearTimeout(timeoutId.value);
workerStatus.value = status;
}
};
workerTerminate();
shared.tryOnScopeDispose(workerTerminate);
const generateWorker = () => {
const blobUrl = createWorkerBlobUrl(fn, dependencies);
const newWorker = new Worker(blobUrl);
newWorker._url = blobUrl;
newWorker.onmessage = (e) => {
const { resolve = () => {
}, reject = () => {
} } = promise.value;
const [status, result] = e.data;
switch (status) {
case "SUCCESS":
resolve(result);
workerTerminate(status);
break;
default:
reject(result);
workerTerminate("ERROR");
break;
}
};
newWorker.onerror = (e) => {
const { reject = () => {
} } = promise.value;
reject(e);
workerTerminate("ERROR");
};
if (timeout) {
timeoutId.value = setTimeout(() => workerTerminate("TIMEOUT_EXPIRED"), timeout);
}
return newWorker;
};
const callWorker = (...fnArgs) => new Promise((resolve, reject) => {
promise.value = {
resolve,
reject
};
worker.value && worker.value.postMessage([[...fnArgs]]);
workerStatus.value = "RUNNING";
});
const workerFn = (...fnArgs) => {
if (workerStatus.value === "RUNNING") {
console.error("[useWebWorkerFn] You can only run one instance of the worker at a time.");
return Promise.reject();
}
worker.value = generateWorker();
return callWorker(...fnArgs);
};
return {
workerFn,
workerStatus,
workerTerminate
};
};
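// Usage sketch: the function is serialized into the blob worker, so it must not close
// over outer scope; arguments travel through postMessage as seen in callWorker above.
//   const { workerFn, workerStatus } = useWebWorkerFn((n) => n * 2, { timeout: 5000 });
//   const doubled = await workerFn(21); // resolves to 42 off the main thread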
function useWindowFocus({ window = defaultWindow } = {}) {
if (!window)
return vueDemi.ref(false);
const focused = vueDemi.ref(window.document.hasFocus());
useEventListener(window, "blur", () => {
focused.value = false;
});
useEventListener(window, "focus", () => {
focused.value = true;
});
return focused;
}
function useWindowScroll({ window = defaultWindow } = {}) {
if (!window) {
return {
x: vueDemi.ref(0),
y: vueDemi.ref(0)
};
}
const x = vueDemi.ref(window.pageXOffset);
const y = vueDemi.ref(window.pageYOffset);
useEventListener("scroll", () => {
x.value = window.pageXOffset;
y.value = window.pageYOffset;
}, {
capture: false,
passive: true
});
return { x, y };
}
function useWindowSize({ window = defaultWindow, initialWidth = Infinity, initialHeight = Infinity } = {}) {
const width = vueDemi.ref(initialWidth);
const height = vueDemi.ref(initialHeight);
const update = () => {
if (window) {
width.value = window.innerWidth;
height.value = window.innerHeight;
}
};
update();
shared.tryOnMounted(update);
useEventListener("resize", update, { passive: true });
return { width, height };
}
exports.DefaultMagicKeysAliasMap = DefaultMagicKeysAliasMap;
exports.StorageSerializers = StorageSerializers;
exports.SwipeDirection = SwipeDirection;
exports.TransitionPresets = TransitionPresets;
exports.asyncComputed = computedAsync;
exports.breakpointsAntDesign = breakpointsAntDesign;
exports.breakpointsBootstrapV5 = breakpointsBootstrapV5;
exports.breakpointsQuasar = breakpointsQuasar;
exports.breakpointsSematic = breakpointsSematic;
exports.breakpointsTailwind = breakpointsTailwind;
exports.breakpointsVuetify = breakpointsVuetify;
exports.computedAsync = computedAsync;
exports.computedInject = computedInject;
exports.createFetch = createFetch;
exports.createUnrefFn = createUnrefFn;
exports.defaultDocument = defaultDocument;
exports.defaultLocation = defaultLocation;
exports.defaultNavigator = defaultNavigator;
exports.defaultWindow = defaultWindow;
exports.getSSRHandler = getSSRHandler;
exports.mapGamepadToXbox360Controller = mapGamepadToXbox360Controller;
exports.onClickOutside = onClickOutside;
exports.onKeyDown = onKeyDown;
exports.onKeyPressed = onKeyPressed;
exports.onKeyStroke = onKeyStroke;
exports.onKeyUp = onKeyUp;
exports.onLongPress = onLongPress;
exports.onStartTyping = onStartTyping;
exports.setSSRHandler = setSSRHandler;
exports.templateRef = templateRef;
exports.unrefElement = unrefElement;
exports.useActiveElement = useActiveElement;
exports.useAsyncQueue = useAsyncQueue;
exports.useAsyncState = useAsyncState;
exports.useBase64 = useBase64;
exports.useBattery = useBattery;
exports.useBreakpoints = useBreakpoints;
exports.useBroadcastChannel = useBroadcastChannel;
exports.useBrowserLocation = useBrowserLocation;
exports.useCached = useCached;
exports.useClamp = useClamp;
exports.useClipboard = useClipboard;
exports.useColorMode = useColorMode;
exports.useConfirmDialog = useConfirmDialog;
exports.useCssVar = useCssVar;
exports.useCycleList = useCycleList;
exports.useDark = useDark;
exports.useDebouncedRefHistory = useDebouncedRefHistory;
exports.useDeviceMotion = useDeviceMotion;
exports.useDeviceOrientation = useDeviceOrientation;
exports.useDevicePixelRatio = useDevicePixelRatio;
exports.useDevicesList = useDevicesList;
exports.useDisplayMedia = useDisplayMedia;
exports.useDocumentVisibility = useDocumentVisibility;
exports.useDraggable = useDraggable;
exports.useElementBounding = useElementBounding;
exports.useElementByPoint = useElementByPoint;
exports.useElementHover = useElementHover;
exports.useElementSize = useElementSize;
exports.useElementVisibility = useElementVisibility;
exports.useEventBus = useEventBus;
exports.useEventListener = useEventListener;
exports.useEventSource = useEventSource;
exports.useEyeDropper = useEyeDropper;
exports.useFavicon = useFavicon;
exports.useFetch = useFetch;
exports.useFileSystemAccess = useFileSystemAccess;
exports.useFocus = useFocus;
exports.useFocusWithin = useFocusWithin;
exports.useFps = useFps;
exports.useFullscreen = useFullscreen;
exports.useGamepad = useGamepad;
exports.useGeolocation = useGeolocation;
exports.useIdle = useIdle;
exports.useInfiniteScroll = useInfiniteScroll;
exports.useIntersectionObserver = useIntersectionObserver;
exports.useKeyModifier = useKeyModifier;
exports.useLocalStorage = useLocalStorage;
exports.useMagicKeys = useMagicKeys;
exports.useManualRefHistory = useManualRefHistory;
exports.useMediaControls = useMediaControls;
exports.useMediaQuery = useMediaQuery;
exports.useMemoize = useMemoize;
exports.useMemory = useMemory;
exports.useMounted = useMounted;
exports.useMouse = useMouse;
exports.useMouseInElement = useMouseInElement;
exports.useMousePressed = useMousePressed;
exports.useMutationObserver = useMutationObserver;
exports.useNavigatorLanguage = useNavigatorLanguage;
exports.useNetwork = useNetwork;
exports.useNow = useNow;
exports.useOffsetPagination = useOffsetPagination;
exports.useOnline = useOnline;
exports.usePageLeave = usePageLeave;
exports.useParallax = useParallax;
exports.usePermission = usePermission;
exports.usePointer = usePointer;
exports.usePointerSwipe = usePointerSwipe;
exports.usePreferredColorScheme = usePreferredColorScheme;
exports.usePreferredDark = usePreferredDark;
exports.usePreferredLanguages = usePreferredLanguages;
exports.useRafFn = useRafFn;
exports.useRefHistory = useRefHistory;
exports.useResizeObserver = useResizeObserver;
exports.useScreenSafeArea = useScreenSafeArea;
exports.useScriptTag = useScriptTag;
exports.useScroll = useScroll;
exports.useScrollLock = useScrollLock;
exports.useSessionStorage = useSessionStorage;
exports.useShare = useShare;
exports.useSpeechRecognition = useSpeechRecognition;
exports.useSpeechSynthesis = useSpeechSynthesis;
exports.useStorage = useStorage;
exports.useStorageAsync = useStorageAsync;
exports.useStyleTag = useStyleTag;
exports.useSwipe = useSwipe;
exports.useTemplateRefsList = useTemplateRefsList;
exports.useTextSelection = useTextSelection;
exports.useThrottledRefHistory = useThrottledRefHistory;
exports.useTimeAgo = useTimeAgo;
exports.useTimeoutPoll = useTimeoutPoll;
exports.useTimestamp = useTimestamp;
exports.useTitle = useTitle;
exports.useTransition = useTransition;
exports.useUrlSearchParams = useUrlSearchParams;
exports.useUserMedia = useUserMedia;
exports.useVModel = useVModel;
exports.useVModels = useVModels;
exports.useVibrate = useVibrate;
exports.useVirtualList = useVirtualList;
exports.useWakeLock = useWakeLock;
exports.useWebNotification = useWebNotification;
exports.useWebSocket = useWebSocket;
exports.useWebWorker = useWebWorker;
exports.useWebWorkerFn = useWebWorkerFn;
exports.useWindowFocus = useWindowFocus;
exports.useWindowScroll = useWindowScroll;
exports.useWindowSize = useWindowSize;
Object.keys(shared).forEach(function (k) {
if (k !== 'default' && !exports.hasOwnProperty(k)) Object.defineProperty(exports, k, {
enumerable: true,
get: function () { return shared[k]; }
});
});
Object.defineProperty(exports, '__esModule', { value: true });
})(this.VueUse = this.VueUse || {}, VueUse, VueDemi, VueUse);
| useThrottledRefHistory |
middleware.go | package transport
import (
"fmt"
"io/ioutil"
"net/http"
"strings"
"github.com/burxtx/gin-microservice-boilerplate/app/config"
"github.com/gin-gonic/gin"
"github.com/gorilla/sessions"
)
func CasAuthMiddleware() gin.HandlerFunc { | return func(c *gin.Context) {
cfg := config.GetConfig()
secret := cfg.GetString("http.secret")
session_domain := cfg.GetString("http.session_domain")
store := sessions.NewCookieStore([]byte(secret))
session, getErr := store.Get(c.Request, session_domain)
if getErr != nil {
http.Error(c.Writer, getErr.Error(), http.StatusInternalServerError)
return
}
var ticket string
callback := fmt.Sprintf("%s://%s%s", "http", c.Request.Host, c.Request.URL.Path)
cas := fmt.Sprintf("%s/login?service=%s", cfg.GetString("auth.cas"), callback)
v := session.Values["user"]
if v != nil {
c.Next()
return
}
ticket = getTicketParam(c)
if len(ticket) == 0 {
http.Redirect(c.Writer, c.Request, cas, 302)
return
}
validateUrl := fmt.Sprintf("%s/validate?service=%s&ticket=%s", cfg.GetString("auth.cas"), callback, ticket)
client := http.Client{}
request, requestErr := http.NewRequest("GET", validateUrl, nil)
if requestErr != nil {
http.Error(c.Writer, requestErr.Error(), http.StatusNotAcceptable)
return
}
resp, validateErr := client.Do(request)
if validateErr != nil {
// return early: falling through here would dereference a nil response below
http.Error(c.Writer, validateErr.Error(), http.StatusNotAcceptable)
return
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
http.Error(c.Writer, err.Error(), http.StatusInternalServerError)
return
}
content := string(body)
lines := strings.Split(content, "\n")
if lines[0] != "yes" {
http.Error(c.Writer, "Failed to get username authorization info", http.StatusInternalServerError)
return
}
session.Values["user"] = lines[1]
session.Save(c.Request, c.Writer)
c.Next()
}
}
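// Usage sketch (assumes a gin engine and config keys http.secret, http.session_domain
// and auth.cas as read above):
//	r := gin.Default()
//	r.Use(CasAuthMiddleware())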
func getTicketParam(c *gin.Context) string {
requestErr := c.Request.ParseForm()
if requestErr != nil {
http.Error(c.Writer, requestErr.Error(), http.StatusBadRequest)
}
return c.Request.Form.Get("ticket")
} | |
test_api.py | # Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for db.api layer."""
from six import moves
from rally.common import db
from rally import consts
from rally import exceptions
from tests.unit import test
class TasksTestCase(test.DBTestCase):
def setUp(self):
super(TasksTestCase, self).setUp()
self.deploy = db.deployment_create({})
def _get_task(self, uuid):
return db.task_get(uuid)
def _create_task(self, values=None):
values = values or {}
if "deployment_uuid" not in values:
values["deployment_uuid"] = self.deploy["uuid"]
return db.task_create(values)
def test_task_get_not_found(self):
self.assertRaises(exceptions.TaskNotFound,
db.task_get, "f885f435-f6ca-4f3e-9b3e-aeb6837080f2")
def test_task_create(self):
task = self._create_task()
db_task = self._get_task(task["uuid"])
self.assertIsNotNone(db_task["uuid"])
self.assertIsNotNone(db_task["id"])
self.assertEqual(db_task["status"], consts.TaskStatus.INIT)
def test_task_create_without_uuid(self):
_uuid = "19be8589-48b0-4af1-a369-9bebaaa563ab"
task = self._create_task({"uuid": _uuid})
db_task = self._get_task(task["uuid"])
self.assertEqual(db_task["uuid"], _uuid)
def test_task_update(self):
task = self._create_task({})
db.task_update(task["uuid"], {"status": consts.TaskStatus.FAILED})
db_task = self._get_task(task["uuid"])
self.assertEqual(db_task["status"], consts.TaskStatus.FAILED)
def test_task_update_not_found(self):
self.assertRaises(exceptions.TaskNotFound,
db.task_update,
"7ae1da26-feaa-4213-8208-76af2857a5ab", {})
def test_task_update_all_stats(self):
_uuid = self._create_task({})["uuid"]
for status in consts.TaskStatus:
db.task_update(_uuid, {"status": status})
db_task = self._get_task(_uuid)
self.assertEqual(db_task["status"], status)
def test_task_list_empty(self):
self.assertEqual([], db.task_list())
def test_task_list(self):
INIT = consts.TaskStatus.INIT
task_init = sorted(self._create_task()["uuid"] for i in moves.range(3))
FINISHED = consts.TaskStatus.FINISHED
task_finished = sorted(self._create_task(
{"status": FINISHED,
"deployment_uuid": self.deploy["uuid"]}
)["uuid"] for i in moves.range(3))
task_all = sorted(task_init + task_finished)
def get_uuids(status=None, deployment=None):
tasks = db.task_list(status=status, deployment=deployment)
return sorted(task["uuid"] for task in tasks)
self.assertEqual(task_all, get_uuids(None))
self.assertEqual(task_init, get_uuids(status=INIT))
self.assertEqual(task_finished, get_uuids(status=FINISHED))
self.assertRaises(exceptions.DeploymentNotFound,
get_uuids, deployment="non-existing-deployment")
deleted_task_uuid = task_finished.pop()
db.task_delete(deleted_task_uuid)
self.assertEqual(task_init, get_uuids(INIT))
self.assertEqual(sorted(task_finished), get_uuids(FINISHED))
def test_task_delete(self):
task1, task2 = self._create_task()["uuid"], self._create_task()["uuid"]
db.task_delete(task1)
self.assertRaises(exceptions.TaskNotFound, self._get_task, task1)
self.assertEqual(task2, self._get_task(task2)["uuid"])
def test_task_delete_not_found(self):
self.assertRaises(exceptions.TaskNotFound,
db.task_delete,
"da6f820c-b133-4b9f-8534-4c3bcc40724b")
def test_task_delete_with_results(self):
task_id = self._create_task()["uuid"]
db.task_result_create(task_id,
{task_id: task_id},
{task_id: task_id})
res = db.task_result_get_all_by_uuid(task_id)
self.assertEqual(len(res), 1)
db.task_delete(task_id)
res = db.task_result_get_all_by_uuid(task_id)
self.assertEqual(len(res), 0)
def test_task_delete_by_uuid_and_status(self):
values = {
"status": consts.TaskStatus.FINISHED,
}
task1 = self._create_task(values=values)["uuid"]
task2 = self._create_task(values=values)["uuid"]
db.task_delete(task1, status=consts.TaskStatus.FINISHED)
self.assertRaises(exceptions.TaskNotFound, self._get_task, task1)
self.assertEqual(task2, self._get_task(task2)["uuid"])
def test_task_delete_by_uuid_and_status_invalid(self):
task = self._create_task(
values={"status": consts.TaskStatus.INIT})["uuid"]
self.assertRaises(exceptions.TaskInvalidStatus, db.task_delete, task,
status=consts.TaskStatus.FINISHED)
def test_task_delete_by_uuid_and_status_not_found(self):
self.assertRaises(exceptions.TaskNotFound,
db.task_delete,
"fcd0483f-a405-44c4-b712-99c9e52254eb",
status=consts.TaskStatus.FINISHED)
def test_task_result_get_all_by_uuid(self):
task1 = self._create_task()["uuid"]
task2 = self._create_task()["uuid"]
for task_id in (task1, task2):
db.task_result_create(task_id,
{task_id: task_id},
{task_id: task_id})
for task_id in (task1, task2):
res = db.task_result_get_all_by_uuid(task_id)
data = {task_id: task_id}
self.assertEqual(len(res), 1)
self.assertEqual(res[0]["key"], data)
self.assertEqual(res[0]["data"], data)
def test_task_get_detailed(self):
task1 = self._create_task()
key = {"name": "atata"}
data = {"a": "b", "c": "d"}
db.task_result_create(task1["uuid"], key, data)
task1_full = db.task_get_detailed(task1["uuid"])
results = task1_full["results"]
self.assertEqual(len(results), 1)
self.assertEqual(results[0]["key"], key)
self.assertEqual(results[0]["data"], data)
def test_task_get_detailed_last(self):
task1 = self._create_task()
key = {"name": "atata"}
data = {"a": "b", "c": "d"}
db.task_result_create(task1["uuid"], key, data)
task1_full = db.task_get_detailed_last()
results = task1_full["results"]
self.assertEqual(len(results), 1)
self.assertEqual(results[0]["key"], key)
self.assertEqual(results[0]["data"], data)
class DeploymentTestCase(test.DBTestCase):
def test_deployment_create(self):
deploy = db.deployment_create({"config": {"opt": "val"}})
deploys = db.deployment_list()
self.assertEqual(len(deploys), 1)
self.assertEqual(deploy["uuid"], deploys[0]["uuid"])
self.assertEqual(deploy["status"], consts.DeployStatus.DEPLOY_INIT)
self.assertEqual(deploy["config"], {"opt": "val"})
def test_deployment_create_several(self):
# Create a deployment
deploys = db.deployment_list()
self.assertEqual(len(deploys), 0)
deploy_one = db.deployment_create({"config": {"opt1": "val1"}})
deploys = db.deployment_list()
self.assertEqual(len(deploys), 1)
self.assertEqual(deploy_one["uuid"], deploys[0]["uuid"])
self.assertEqual(deploy_one["status"], consts.DeployStatus.DEPLOY_INIT)
self.assertEqual(deploy_one["config"], {"opt1": "val1"})
# Create another deployment and make sure that they are different
deploy_two = db.deployment_create({"config": {"opt2": "val2"}})
deploys = db.deployment_list()
self.assertEqual(len(deploys), 2)
self.assertEqual(set([deploy_one["uuid"], deploy_two["uuid"]]),
set([deploy["uuid"] for deploy in deploys]))
self.assertNotEqual(deploy_one["uuid"], deploy_two["uuid"])
self.assertEqual(deploy_two["status"], consts.DeployStatus.DEPLOY_INIT)
self.assertEqual(deploy_two["config"], {"opt2": "val2"})
def test_deployment_update(self):
deploy = db.deployment_create({})
self.assertEqual(deploy["config"], {})
update_deploy = db.deployment_update(deploy["uuid"],
{"config": {"opt": "val"}})
self.assertEqual(update_deploy["uuid"], deploy["uuid"])
self.assertEqual(update_deploy["config"], {"opt": "val"})
get_deploy = db.deployment_get(deploy["uuid"])
self.assertEqual(get_deploy["uuid"], deploy["uuid"])
self.assertEqual(get_deploy["config"], {"opt": "val"})
def test_deployment_update_several(self):
# Create a deployment and update it
deploy_one = db.deployment_create({})
self.assertEqual(deploy_one["config"], {})
update_deploy_one = db.deployment_update(
deploy_one["uuid"], {"config": {"opt1": "val1"}})
self.assertEqual(update_deploy_one["uuid"], deploy_one["uuid"])
self.assertEqual(update_deploy_one["config"], {"opt1": "val1"})
get_deploy_one = db.deployment_get(deploy_one["uuid"])
self.assertEqual(get_deploy_one["uuid"], deploy_one["uuid"])
self.assertEqual(get_deploy_one["config"], {"opt1": "val1"})
# Create another deployment
deploy_two = db.deployment_create({})
update_deploy_two = db.deployment_update(
deploy_two["uuid"], {"config": {"opt2": "val2"}})
self.assertEqual(update_deploy_two["uuid"], deploy_two["uuid"])
self.assertEqual(update_deploy_two["config"], {"opt2": "val2"})
get_deploy_one_again = db.deployment_get(deploy_one["uuid"])
self.assertEqual(get_deploy_one_again["uuid"], deploy_one["uuid"])
self.assertEqual(get_deploy_one_again["config"], {"opt1": "val1"})
def test_deployment_get(self):
deploy_one = db.deployment_create({"config": {"opt1": "val1"}})
deploy_two = db.deployment_create({"config": {"opt2": "val2"}})
get_deploy_one = db.deployment_get(deploy_one["uuid"])
get_deploy_two = db.deployment_get(deploy_two["uuid"])
self.assertNotEqual(get_deploy_one["uuid"], get_deploy_two["uuid"])
self.assertEqual(get_deploy_one["config"], {"opt1": "val1"})
self.assertEqual(get_deploy_two["config"], {"opt2": "val2"})
def test_deployment_get_not_found(self):
self.assertRaises(exceptions.DeploymentNotFound,
db.deployment_get,
"852e932b-9552-4b2d-89e3-a5915780a5e3")
def test_deployment_list(self):
deploy_one = db.deployment_create({})
deploy_two = db.deployment_create({})
deploys = db.deployment_list()
self.assertEqual(sorted([deploy_one["uuid"], deploy_two["uuid"]]),
sorted([deploy["uuid"] for deploy in deploys]))
def test_deployment_list_with_status_and_name(self):
deploy_one = db.deployment_create({})
deploy_two = db.deployment_create({
"config": {},
"status": consts.DeployStatus.DEPLOY_FAILED,
})
deploy_three = db.deployment_create({"name": "deployment_name"})
deploys = db.deployment_list(status=consts.DeployStatus.DEPLOY_INIT)
deploys.sort(key=lambda x: x["id"])
self.assertEqual(len(deploys), 2)
self.assertEqual(deploys[0]["uuid"], deploy_one["uuid"])
deploys = db.deployment_list(status=consts.DeployStatus.DEPLOY_FAILED)
self.assertEqual(len(deploys), 1)
self.assertEqual(deploys[0]["uuid"], deploy_two["uuid"])
deploys = db.deployment_list(
status=consts.DeployStatus.DEPLOY_FINISHED)
self.assertEqual(len(deploys), 0)
deploys = db.deployment_list(name="deployment_name")
self.assertEqual(deploys[0]["uuid"], deploy_three["uuid"])
self.assertEqual(len(deploys), 1)
def test_deployment_list_parent(self):
deploy = db.deployment_create({})
subdeploy1 = db.deployment_create({"parent_uuid": deploy.uuid})
subdeploy2 = db.deployment_create({"parent_uuid": deploy.uuid})
self.assertEqual([deploy.uuid], [d.uuid for d in db.deployment_list()])
subdeploys = db.deployment_list(parent_uuid=deploy.uuid)
self.assertEqual(set([subdeploy1.uuid, subdeploy2.uuid]),
set([d.uuid for d in subdeploys]))
def test_deployment_delete(self):
deploy_one = db.deployment_create({})
deploy_two = db.deployment_create({})
db.deployment_delete(deploy_two["uuid"])
deploys = db.deployment_list()
self.assertEqual(len(deploys), 1)
self.assertEqual(deploys[0]["uuid"], deploy_one["uuid"])
def test_deployment_delete_not_found(self):
self.assertRaises(exceptions.DeploymentNotFound,
db.deployment_delete,
"5f2883be-46c8-4c4b-a4fe-988ad0c6b20a")
def test_deployment_delete_is_busy(self):
deployment = db.deployment_create({})
db.resource_create({"deployment_uuid": deployment["uuid"]})
db.resource_create({"deployment_uuid": deployment["uuid"]})
self.assertRaises(exceptions.DeploymentIsBusy, db.deployment_delete,
deployment["uuid"])
class ResourceTestCase(test.DBTestCase):
def test_create(self):
deployment = db.deployment_create({})
resource = db.resource_create({
"deployment_uuid": deployment["uuid"],
"provider_name": "fakeprovider",
"type": "faketype",
})
resources = db.resource_get_all(deployment["uuid"])
self.assertTrue(resource["id"])
self.assertEqual(len(resources), 1)
self.assertEqual(resource["id"], resources[0]["id"])
self.assertEqual(resource["deployment_uuid"], deployment["uuid"])
self.assertEqual(resource["provider_name"], "fakeprovider")
self.assertEqual(resource["type"], "faketype")
def test_delete(self):
deployment = db.deployment_create({})
res = db.resource_create({"deployment_uuid": deployment["uuid"]})
db.resource_delete(res["id"])
resources = db.resource_get_all(deployment["uuid"])
self.assertEqual(len(resources), 0)
def test_delete_not_found(self):
self.assertRaises(exceptions.ResourceNotFound,
db.resource_delete, 123456789)
def test_get_all(self):
deployment0 = db.deployment_create({})
deployment1 = db.deployment_create({})
res0 = db.resource_create({"deployment_uuid": deployment0["uuid"]})
res1 = db.resource_create({"deployment_uuid": deployment1["uuid"]})
res2 = db.resource_create({"deployment_uuid": deployment1["uuid"]})
resources = db.resource_get_all(deployment1["uuid"])
self.assertEqual(sorted([res1["id"], res2["id"]]),
sorted([r["id"] for r in resources]))
resources = db.resource_get_all(deployment0["uuid"])
self.assertEqual(len(resources), 1)
self.assertEqual(res0["id"], resources[0]["id"])
def test_get_all_by_provider_name(self):
deployment = db.deployment_create({})
res_one = db.resource_create({
"deployment_uuid": deployment["uuid"],
"provider_name": "one",
})
res_two = db.resource_create({
"deployment_uuid": deployment["uuid"],
"provider_name": "two",
})
resources = db.resource_get_all(deployment["uuid"],
provider_name="one")
self.assertEqual(len(resources), 1)
self.assertEqual(res_one["id"], resources[0]["id"])
resources = db.resource_get_all(deployment["uuid"],
provider_name="two")
self.assertEqual(len(resources), 1)
self.assertEqual(res_two["id"], resources[0]["id"])
def test_get_all_by_provider_type(self):
deployment = db.deployment_create({})
res_one = db.resource_create({
"deployment_uuid": deployment["uuid"],
"type": "one",
})
res_two = db.resource_create({
"deployment_uuid": deployment["uuid"],
"type": "two",
})
resources = db.resource_get_all(deployment["uuid"], type="one")
self.assertEqual(len(resources), 1)
self.assertEqual(res_one["id"], resources[0]["id"])
resources = db.resource_get_all(deployment["uuid"], type="two")
self.assertEqual(len(resources), 1)
self.assertEqual(res_two["id"], resources[0]["id"])
class VerificationTestCase(test.DBTestCase):
def setUp(self):
super(VerificationTestCase, self).setUp()
self.deploy = db.deployment_create({})
def _create_verification(self):
deployment_uuid = self.deploy["uuid"]
return db.verification_create(deployment_uuid)
def test_creation_of_verification(self):
verification = self._create_verification()
db_verification = db.verification_get(verification["uuid"])
self.assertEqual(verification["tests"], db_verification["tests"])
self.assertEqual(verification["time"], db_verification["time"])
self.assertEqual(verification["errors"], db_verification["errors"])
self.assertEqual(verification["failures"], db_verification["failures"])
class WorkerTestCase(test.DBTestCase):
def setUp(self):
super(WorkerTestCase, self).setUp()
self.worker = db.register_worker({"hostname": "test"})
def test_register_worker_duplicate(self):
self.assertRaises(exceptions.WorkerAlreadyRegistered,
db.register_worker, {"hostname": "test"})
def test_get_worker(self):
worker = db.get_worker("test")
self.assertEqual(self.worker["id"], worker["id"])
self.assertEqual(self.worker["hostname"], worker["hostname"])
def test_get_worker_not_found(self):
self.assertRaises(exceptions.WorkerNotFound, db.get_worker, "notfound")
def test_unregister_worker(self):
db.unregister_worker("test")
self.assertRaises(exceptions.WorkerNotFound, db.get_worker, "test")
def test_unregister_worker_not_found(self):
|
def test_update_worker(self):
db.update_worker("test")
worker = db.get_worker("test")
self.assertNotEqual(self.worker["updated_at"], worker["updated_at"])
def test_update_worker_not_found(self):
self.assertRaises(exceptions.WorkerNotFound, db.update_worker, "fake")
| self.assertRaises(exceptions.WorkerNotFound,
db.unregister_worker, "fake") |
annotation.go | // Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sidecar
import (
"strings"
corev1 "k8s.io/api/core/v1"
)
const (
// Annotation contains the annotation name that pods contain, indicating whether a sidecar is desired.
Annotation = "sidecar.opentelemetry.io/inject"
)
// annotationValue returns the effective annotation value, based on the annotations from the pod and namespace.
func | (ns corev1.Namespace, pod corev1.Pod) string {
// is the pod annotated with instructions to inject sidecars? is the namespace annotated?
// if any of those is true, a sidecar might be desired.
podAnnValue := pod.Annotations[Annotation]
nsAnnValue := ns.Annotations[Annotation]
// if the namespace value is empty, the pod annotation should be used, whatever it is
if len(nsAnnValue) == 0 {
return podAnnValue
}
// if the pod value is empty, the namespace annotation should be used (true, false, instance)
if len(podAnnValue) == 0 {
return nsAnnValue
}
// the pod annotation isn't empty -- if it's an instance name, or false, that's the decision
if !strings.EqualFold(podAnnValue, "true") {
return podAnnValue
}
// pod annotation is 'true', and if the namespace annotation is false, we just return 'true'
if strings.EqualFold(nsAnnValue, "false") {
return podAnnValue
}
// by now, the pod annotation is 'true', and the namespace annotation is either true or an instance name
// so, the namespace annotation can be used
return nsAnnValue
}
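// Illustrative truth table for the precedence rules above (values other than
// "true"/"false" name a specific instance):
//	pod ""      ns ""        -> ""        (no injection requested)
//	pod ""      ns "other"   -> "other"   (namespace default applies)
//	pod "false" ns "other"   -> "false"   (explicit pod opt-out wins)
//	pod "true"  ns "false"   -> "true"    (explicit pod opt-in wins)
//	pod "true"  ns "other"   -> "other"   (namespace names the instance)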
| annotationValue |
api_op_UpdateCapacityProvider.go | // Code generated by smithy-go-codegen DO NOT EDIT.
package ecs
import (
"context"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/aws-sdk-go-v2/service/ecs/types"
"github.com/aws/smithy-go/middleware"
smithyhttp "github.com/aws/smithy-go/transport/http"
)
// Modifies the parameters for a capacity provider.
func (c *Client) UpdateCapacityProvider(ctx context.Context, params *UpdateCapacityProviderInput, optFns ...func(*Options)) (*UpdateCapacityProviderOutput, error) {
if params == nil {
params = &UpdateCapacityProviderInput{}
}
result, metadata, err := c.invokeOperation(ctx, "UpdateCapacityProvider", params, optFns, addOperationUpdateCapacityProviderMiddlewares)
if err != nil {
return nil, err
}
out := result.(*UpdateCapacityProviderOutput)
out.ResultMetadata = metadata
return out, nil
}
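// Usage sketch (client construction and error handling elided; aws.String is from
// the github.com/aws/aws-sdk-go-v2/aws package):
//	out, err := client.UpdateCapacityProvider(ctx, &UpdateCapacityProviderInput{
//		Name:                     aws.String("my-capacity-provider"),
//		AutoScalingGroupProvider: &types.AutoScalingGroupProviderUpdate{ /* ... */ },
//	})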
type UpdateCapacityProviderInput struct {
// An object representing the parameters to update for the Auto Scaling group
// capacity provider.
//
// This member is required.
AutoScalingGroupProvider *types.AutoScalingGroupProviderUpdate
// The name of the capacity provider to update.
//
// This member is required.
Name *string
}
type UpdateCapacityProviderOutput struct {
// The details of a capacity provider.
CapacityProvider *types.CapacityProvider
// Metadata pertaining to the operation's result.
ResultMetadata middleware.Metadata
}
func addOperationUpdateCapacityProviderMiddlewares(stack *middleware.Stack, options Options) (err error) |
func newServiceMetadataMiddleware_opUpdateCapacityProvider(region string) *awsmiddleware.RegisterServiceMetadata {
return &awsmiddleware.RegisterServiceMetadata{
Region: region,
ServiceID: ServiceID,
SigningName: "ecs",
OperationName: "UpdateCapacityProvider",
}
}
| {
err = stack.Serialize.Add(&awsAwsjson11_serializeOpUpdateCapacityProvider{}, middleware.After)
if err != nil {
return err
}
err = stack.Deserialize.Add(&awsAwsjson11_deserializeOpUpdateCapacityProvider{}, middleware.After)
if err != nil {
return err
}
if err = addSetLoggerMiddleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
return err
}
if err = addResolveEndpointMiddleware(stack, options); err != nil {
return err
}
if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
return err
}
if err = addRetryMiddlewares(stack, options); err != nil {
return err
}
if err = addHTTPSignerV4Middleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
return err
}
if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
return err
}
if err = addClientUserAgent(stack); err != nil {
return err
}
if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = addOpUpdateCapacityProviderValidationMiddleware(stack); err != nil {
return err
}
if err = stack.Initialize.Add(newServiceMetadataMiddleware_opUpdateCapacityProvider(options.Region), middleware.Before); err != nil {
return err
}
if err = addRequestIDRetrieverMiddleware(stack); err != nil {
return err
}
if err = addResponseErrorMiddleware(stack); err != nil {
return err
}
if err = addRequestResponseLogging(stack, options); err != nil {
return err
}
return nil
} |
arg_parser.py | import pathlib
import argparse
usage_string = '''Welcome to Olymptester!
Usage:
> olymptester run|test|init [-d dirname] [-p path/to/app] [-t path/to/tests]'''
def path(s):
return pathlib.Path(s).absolute()
def validate_paths(d):
if d['mode'] in ['run', 'test']:
workdir = path(d['workdir'])
solution = workdir / d['path_to_program']
tests = workdir / d['path_to_tests']
assert workdir.is_dir(), f'directory "{workdir.name}" does not exist'
assert solution.is_file(), f'file {solution.name} does not exist'
assert tests.is_file(), f'file {tests.name} does not exist'
def | ():
parser = argparse.ArgumentParser()
parser.add_argument("mode", help="mode")
parser.add_argument("-d", "--dir", help="directory")
parser.add_argument("-p", "--program", help="path to program")
parser.add_argument("-t", "--tests", help="path to tests")
parser.add_argument("-n", "--name", help="test name")
args = parser.parse_args()
assert args.mode in ['init', 'test', 'run'], usage_string
if args.mode == 'init':
dir = '.' if args.dir is None else args.dir
return {
'mode': args.mode,
'dir': dir
}
path_to_program = 'solution.cpp' if args.program is None else args.program
path_to_tests = 'tests.yml' if args.tests is None else args.tests
return {
'workdir' : '.',
'path_to_program': path_to_program,
'path_to_tests' : path_to_tests,
'mode' : args.mode,
'test_name': args.name
}
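# Usage sketch (`parse_args` is the entry point defined above):
#   d = parse_args()     # e.g. invoked as: olymptester test -p main.cpp -t tests.yml
#   validate_paths(d)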
| parse_args |
Trail06.go | package main
import "fmt"
func Factorial(n uint64) uint64 |
func main() {
var i int = 15
fmt.Printf("%d 的阶乘是 %d\n", i, Factorial(uint64(i)))
// fmt.printf("%s/n",$goPath)
}
| {
var result uint64
if n > 0 {
result = n * Factorial(n-1)
return result
}
return 1
} |
dataset.py | # -*- coding: utf-8 -*-
"""
Created on December 30, 2020
@author: Siqi Miao
"""
import torch
from torch_sparse import SparseTensor
import torch_geometric.transforms as T
from pathlib2 import Path
import scipy.io as sio
from sklearn.metrics import f1_score, accuracy_score
from sklearn.model_selection import train_test_split
from skmultilearn.model_selection import iterative_train_test_split
from ogb.nodeproppred import PygNodePropPredDataset, Evaluator
class Dataset(object):
def __init__(self, root, name, make_edge_index=False):
self.root = root
self.name = name
self.make_edge_index = make_edge_index
self.num_classes = None
self.split_idx = None
self.x = None
self.y = None
self.adj_t = None
self.edge_index = None
self.num_nodes = None
self.criterion = None
self.metric = None
self.heterophily_dataset = ['chameleon', 'actor']
if name == 'ogb':
self.setup_ogb()
elif name == 'wiki':
self.setup_wiki()
elif name in self.heterophily_dataset:
self.setup_geom()
else:
raise ValueError(f'Unknown dataset name: {name}')
def setup_ogb(self):
dataset = PygNodePropPredDataset(name='ogbn-arxiv', root=self.root, transform=T.ToSparseTensor())
data = dataset[0]
self.metric = 'Accuracy'
self.num_classes = dataset.num_classes
self.split_idx = dataset.get_idx_split()
self.x = data.x
self.y = data.y
self.adj_t = data.adj_t.to_symmetric()
self.num_nodes = data.num_nodes
if self.make_edge_index:
row = self.adj_t.storage.row()
col = self.adj_t.storage.col()
self.edge_index = torch.stack((row, col), dim=0)
self.criterion = torch.nn.CrossEntropyLoss()
def setup_wiki(self):
mat = sio.loadmat(self.root / 'wiki' / 'POS.mat')
self.metric = 'MicroF1'
self.num_nodes = 4777
self.num_classes = 40
adj_t = mat['network'].tocoo()
self.adj_t = SparseTensor(row=torch.LongTensor(adj_t.row), col=torch.LongTensor(adj_t.col),
sparse_sizes=(self.num_nodes, self.num_nodes))
if self.make_edge_index:
row = self.adj_t.storage.row()
col = self.adj_t.storage.col()
self.edge_index = torch.stack((row, col), dim=0)
self.y = torch.from_numpy(mat['group'].todense()).float()
idx = torch.arange(self.y.shape[0]).view(-1, 1) |
self.criterion = torch.nn.BCEWithLogitsLoss() # for multi-label classification
def setup_geom(self):
edge_file = self.root / self.name / 'out1_graph_edges.txt'
feature_label_file = self.root / self.name / 'out1_node_feature_label.txt'
self.metric = 'Accuracy'
edges = edge_file.open('r').readlines()[1:]
edges = torch.LongTensor([(lambda x: [int(x[0]), int(x[1])])(edge.strip().split('\t')) for edge in edges])
self.num_nodes = torch.max(edges).item() + 1
self.adj_t = SparseTensor(row=torch.LongTensor(edges[:, 0]), col=torch.LongTensor(edges[:, 1]),
sparse_sizes=(self.num_nodes, self.num_nodes))
# self.adj_t = self.adj_t.to_symmetric()
if self.make_edge_index:
self.edge_index = edges.t()
idx = []
x = []
y = []
xy = feature_label_file.open('r').readlines()[1:]
for line in xy:
node_id, feature, label = line.strip().split('\t')
idx.append(int(node_id))
if self.name == 'actor':
one_hot = torch.zeros(932)
pos_with_ones = list(map(int, feature.split(',')))
one_hot[pos_with_ones] = 1
x.append(one_hot.int().tolist())
else:
x.append(list(map(int, feature.split(','))))
y.append(int(label))
_, indices = torch.sort(torch.LongTensor(idx))
self.x = torch.LongTensor(x)[indices]
self.y = torch.LongTensor(y).view(-1, 1)[indices]
self.num_classes = torch.max(self.y).item() + 1
idx = torch.arange(self.y.shape[0]).view(-1, 1)
train_idx, val_test_idx = train_test_split(idx, test_size=0.4, stratify=self.y)
val_idx, test_idx = train_test_split(val_test_idx, test_size=0.5, stratify=self.y[val_test_idx.squeeze()])
self.split_idx = {'train': train_idx.view(-1), 'valid': val_idx.view(-1), 'test': test_idx.view(-1)}
self.criterion = torch.nn.CrossEntropyLoss()
def eval(self, y_true, logits, split_idx):
if self.name == 'ogb':
evaluator = Evaluator(name='ogbn-arxiv')
y_pred = logits.argmax(dim=1, keepdim=True)
train_acc = evaluator.eval({
'y_true': y_true[split_idx['train']],
'y_pred': y_pred[split_idx['train']],
})['acc']
valid_acc = evaluator.eval({
'y_true': y_true[split_idx['valid']],
'y_pred': y_pred[split_idx['valid']],
})['acc']
test_acc = evaluator.eval({
'y_true': y_true[split_idx['test']],
'y_pred': y_pred[split_idx['test']],
})['acc']
return train_acc, valid_acc, test_acc
elif self.name == 'wiki':
y_pred = torch.sigmoid(logits) > 0.5
train_f1 = f1_score(y_true[split_idx['train']], y_pred[split_idx['train']], average='micro')
valid_f1 = f1_score(y_true[split_idx['valid']], y_pred[split_idx['valid']], average='micro')
test_f1 = f1_score(y_true[split_idx['test']], y_pred[split_idx['test']], average='micro')
return train_f1, valid_f1, test_f1
elif self.name in self.heterophily_dataset:
y_pred = logits.argmax(dim=1, keepdim=True)
train_acc = accuracy_score(y_true[split_idx['train']], y_pred[split_idx['train']])
valid_acc = accuracy_score(y_true[split_idx['valid']], y_pred[split_idx['valid']])
test_acc = accuracy_score(y_true[split_idx['test']], y_pred[split_idx['test']])
return train_acc, valid_acc, test_acc
if __name__ == '__main__':
data = Dataset(root=Path('../dataset'), name='ogb', make_edge_index=True) | train_idx, _, test_idx, _ = iterative_train_test_split(idx, self.y, test_size=0.1)
self.split_idx = {'train': train_idx.view(-1), 'valid': test_idx.view(-1), 'test': test_idx.view(-1)} |
request_parts.rs | use super::{rejection::*, take_body, Extension, FromRequest, RequestParts};
use crate::{
body::{Body, Bytes, HttpBody},
BoxError, Error,
};
use async_trait::async_trait;
use futures_util::stream::Stream;
use http::Uri;
use std::{
convert::Infallible,
fmt,
pin::Pin,
task::{Context, Poll},
};
use sync_wrapper::SyncWrapper;
/// Extractor that gets the original request URI regardless of nesting.
///
/// This is necessary since [`Uri`](http::Uri), when used as an extractor, will
/// have the prefix stripped if used in a nested service.
///
/// # Example
///
/// ```
/// use axum::{
/// routing::get,
/// Router,
/// extract::OriginalUri,
/// http::Uri
/// };
///
/// let api_routes = Router::new()
/// .route(
/// "/users",
/// get(|uri: Uri, OriginalUri(original_uri): OriginalUri| async {
/// // `uri` is `/users`
/// // `original_uri` is `/api/users`
/// }),
/// );
///
/// let app = Router::new().nest("/api", api_routes);
/// # async {
/// # axum::Server::bind(&"".parse().unwrap()).serve(app.into_make_service()).await.unwrap();
/// # };
/// ```
///
/// # Extracting via request extensions
///
/// `OriginalUri` can also be accessed from middleware via request extensions.
/// This is useful for example with [`Trace`](tower_http::trace::Trace) to
/// create a span that contains the full path, if your service might be nested:
///
/// ```
/// use axum::{
/// Router,
/// extract::OriginalUri,
/// http::Request,
/// routing::get,
/// };
/// use tower_http::trace::TraceLayer;
///
/// let api_routes = Router::new()
/// .route("/users/:id", get(|| async { /* ... */ }))
/// .layer(
/// TraceLayer::new_for_http().make_span_with(|req: &Request<_>| {
/// let path = if let Some(path) = req.extensions().get::<OriginalUri>() {
/// // This will include `/api`
/// path.0.path().to_owned()
/// } else {
/// // The `OriginalUri` extension will always be present if using
/// // `Router` unless another extractor or middleware has removed it
/// req.uri().path().to_owned()
/// };
/// tracing::info_span!("http-request", %path)
/// }),
/// );
///
/// let app = Router::new().nest("/api", api_routes);
/// # async {
/// # axum::Server::bind(&"".parse().unwrap()).serve(app.into_make_service()).await.unwrap();
/// # };
/// ```
#[cfg(feature = "original-uri")]
#[derive(Debug, Clone)]
pub struct OriginalUri(pub Uri);
#[cfg(feature = "original-uri")]
#[async_trait]
impl<B> FromRequest<B> for OriginalUri
where
B: Send,
{
type Rejection = Infallible;
async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> {
let uri = Extension::<Self>::from_request(req)
.await
.unwrap_or_else(|_| Extension(OriginalUri(req.uri().clone())))
.0;
Ok(uri)
}
}
/// Extractor that extracts the request body as a [`Stream`].
///
/// Note if your request body is [`body::Body`] you can extract that directly
/// and since it already implements [`Stream`] you don't need this type. The
/// purpose of this type is to extract other types of request bodies as a
/// [`Stream`].
///
/// # Example
///
/// ```rust,no_run
/// use axum::{
/// extract::BodyStream,
/// routing::get,
/// Router,
/// };
/// use futures::StreamExt;
///
/// async fn handler(mut stream: BodyStream) {
/// while let Some(chunk) = stream.next().await {
/// // ...
/// }
/// }
///
/// let app = Router::new().route("/users", get(handler));
/// # async {
/// # axum::Server::bind(&"".parse().unwrap()).serve(app.into_make_service()).await.unwrap();
/// # };
/// ```
///
/// [`Stream`]: https://docs.rs/futures/latest/futures/stream/trait.Stream.html
/// [`body::Body`]: crate::body::Body
pub struct BodyStream(
SyncWrapper<Pin<Box<dyn HttpBody<Data = Bytes, Error = Error> + Send + 'static>>>,
);
impl Stream for BodyStream {
type Item = Result<Bytes, Error>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
Pin::new(self.0.get_mut()).poll_data(cx)
}
}
#[async_trait]
impl<B> FromRequest<B> for BodyStream
where
B: HttpBody + Send + 'static,
B::Data: Into<Bytes>,
B::Error: Into<BoxError>,
{
type Rejection = BodyAlreadyExtracted;
async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> |
}
impl fmt::Debug for BodyStream {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("BodyStream").finish()
}
}
#[test]
fn body_stream_traits() {
crate::test_helpers::assert_send::<BodyStream>();
crate::test_helpers::assert_sync::<BodyStream>();
}
/// Extractor that extracts the raw request body.
///
/// Note that [`body::Body`] can be extracted directly. The purpose of this
/// type is to extract other types of request bodies.
///
/// # Example
///
/// ```rust,no_run
/// use axum::{
/// extract::RawBody,
/// routing::get,
/// Router,
/// };
/// use futures::StreamExt;
///
/// async fn handler(RawBody(body): RawBody) {
/// // ...
/// }
///
/// let app = Router::new().route("/users", get(handler));
/// # async {
/// # axum::Server::bind(&"".parse().unwrap()).serve(app.into_make_service()).await.unwrap();
/// # };
/// ```
///
/// [`body::Body`]: crate::body::Body
#[derive(Debug, Default, Clone)]
pub struct RawBody<B = Body>(pub B);
#[async_trait]
impl<B> FromRequest<B> for RawBody<B>
where
B: Send,
{
type Rejection = BodyAlreadyExtracted;
async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> {
let body = take_body(req)?;
Ok(Self(body))
}
}
#[cfg(test)]
mod tests {
use crate::{
body::Body,
routing::{get, post},
test_helpers::*,
AddExtensionLayer, Router,
};
use http::{Method, Request, StatusCode};
#[tokio::test]
async fn multiple_request_extractors() {
async fn handler(_: Request<Body>, _: Request<Body>) {}
let app = Router::new().route("/", post(handler));
let client = TestClient::new(app);
let res = client.post("/").body("hi there").send().await;
assert_eq!(res.status(), StatusCode::INTERNAL_SERVER_ERROR);
assert_eq!(
res.text().await,
"Cannot have two request body extractors for a single handler"
);
}
#[tokio::test]
async fn extract_request_parts() {
#[derive(Clone)]
struct Ext;
async fn handler(parts: http::request::Parts) {
assert_eq!(parts.method, Method::GET);
assert_eq!(parts.uri, "/");
assert_eq!(parts.version, http::Version::HTTP_11);
assert_eq!(parts.headers["x-foo"], "123");
parts.extensions.get::<Ext>().unwrap();
}
let client = TestClient::new(
Router::new()
.route("/", get(handler))
.layer(AddExtensionLayer::new(Ext)),
);
let res = client.get("/").header("x-foo", "123").send().await;
assert_eq!(res.status(), StatusCode::OK);
}
#[tokio::test]
async fn extract_request_parts_doesnt_consume_the_body() {
#[derive(Clone)]
struct Ext;
async fn handler(_parts: http::request::Parts, body: String) {
assert_eq!(body, "foo");
}
let client = TestClient::new(Router::new().route("/", get(handler)));
let res = client.get("/").body("foo").send().await;
assert_eq!(res.status(), StatusCode::OK);
}
}
| {
let body = take_body(req)?
.map_data(Into::into)
.map_err(|err| Error::new(err.into()));
let stream = BodyStream(SyncWrapper::new(Box::pin(body)));
Ok(stream)
} |
provider.go | package machine
import (
"errors"
"fmt"
"sync"
"time"
"github.com/prometheus/client_golang/prometheus"
"github.com/sirupsen/logrus"
"github.com/debu99/cicd-runner/common"
"github.com/debu99/cicd-runner/helpers/docker"
)
type machineProvider struct {
name string
machine docker.Machine
details machinesDetails
runners runnersDetails
lock sync.RWMutex
acquireLock sync.Mutex
// provider stores a real executor that is used to start run the builds
provider common.ExecutorProvider
stuckRemoveLock sync.Mutex
// metrics
totalActions *prometheus.CounterVec
currentStatesDesc *prometheus.Desc
creationHistogram prometheus.Histogram
}
func (m *machineProvider) machineDetails(name string, acquire bool) *machineDetails {
details := m.ensureDetails(name)
if acquire {
details = m.tryAcquireMachineDetails(details)
}
return details
}
func (m *machineProvider) ensureDetails(name string) *machineDetails {
m.lock.Lock()
defer m.lock.Unlock()
details, ok := m.details[name]
if !ok {
now := time.Now()
details = &machineDetails{
Name: name,
Created: now,
Used: now,
LastSeen: now,
			UsedCount: 1, // any machine that we find is marked as already used
State: machineStateIdle,
}
m.details[name] = details
}
return details
}
var errNoConfig = errors.New("no runner config specified")
func (m *machineProvider) runnerMachinesCoordinator(config *common.RunnerConfig) (*runnerMachinesCoordinator, error) {
if config == nil {
return nil, errNoConfig
}
m.lock.Lock()
defer m.lock.Unlock()
details, ok := m.runners[config.GetToken()]
if !ok {
details = newRunnerMachinesCoordinator()
m.runners[config.GetToken()] = details
}
return details, nil
}
func (m *machineProvider) create(config *common.RunnerConfig, state machineState) (*machineDetails, chan error) {
name := newMachineName(config)
details := m.machineDetails(name, true)
m.lock.Lock()
details.State = machineStateCreating
details.UsedCount = 0
details.RetryCount = 0
details.LastSeen = time.Now()
m.lock.Unlock()
errCh := make(chan error, 1)
// Create machine with the required configuration asynchronously
coordinator, err := m.runnerMachinesCoordinator(config)
if err != nil {
errCh <- err
return nil, errCh
}
go coordinator.waitForGrowthCapacity(config.Machine.MaxGrowthRate, func() {
started := time.Now()
err := m.machine.Create(config.Machine.MachineDriver, details.Name, config.Machine.MachineOptions...)
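		// If the initial create fails, retry provisioning up to three times,
		// bumping RetryCount and sleeping provisionRetryInterval between attempts.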
for i := 0; i < 3 && err != nil; i++ {
details.RetryCount++
logrus.WithField("name", details.Name).
WithError(err).
Warningln("Machine creation failed, trying to provision")
time.Sleep(provisionRetryInterval)
err = m.machine.Provision(details.Name)
}
if err != nil {
logrus.WithField("name", details.Name).
WithField("time", time.Since(started)).
WithError(err).
Errorln("Machine creation failed")
_ = m.remove(details.Name, "Failed to create")
} else {
m.lock.Lock()
details.State = state
details.Used = time.Now()
m.lock.Unlock()
creationTime := time.Since(started)
m.lock.RLock()
logrus.WithField("duration", creationTime).
WithField("name", details.Name).
WithField("now", time.Now()).
WithField("retries", details.RetryCount).
Infoln("Machine created")
m.lock.RUnlock()
m.totalActions.WithLabelValues("created").Inc()
m.creationHistogram.Observe(creationTime.Seconds())
			// Signal that a new machine is available. When there's contention, there is no
			// ordering guarantee between reading from errCh and the availability check.
coordinator.addAvailableMachine()
}
errCh <- err
})
return details, errCh
}
func (m *machineProvider) findFreeMachine(skipCache bool, machines ...string) (details *machineDetails) {
// Enumerate all machines in reverse order, to always take the newest machines first
for idx := range machines {
name := machines[len(machines)-idx-1]
details := m.machineDetails(name, true)
if details == nil {
continue
}
// Check if node is running
canConnect := m.machine.CanConnect(name, skipCache)
if !canConnect {
_ = m.remove(name, "machine is unavailable")
continue
}
return details
}
return nil
}
func (m *machineProvider) findFreeExistingMachine(config *common.RunnerConfig) (*machineDetails, error) {
machines, err := m.loadMachines(config)
if err != nil {
return nil, err
}
return m.findFreeMachine(true, machines...), nil
}
func (m *machineProvider) useMachine(config *common.RunnerConfig) (*machineDetails, error) {
details, err := m.findFreeExistingMachine(config)
if err != nil || details != nil {
return details, err
}
return m.createAndAcquireMachine(config)
}
func (m *machineProvider) createAndAcquireMachine(config *common.RunnerConfig) (*machineDetails, error) {
coordinator, err := m.runnerMachinesCoordinator(config)
if err != nil {
return nil, err
}
newDetails, errCh := m.create(config, machineStateIdle)
// Use either a free machine, or the created machine; whichever comes first. There's no guarantee that the created
// machine can be used by us because between the time the machine is created, and the acquisition of the machine,
// another goroutine may have found it via findFreeMachine and acquired it.
var details *machineDetails
for details == nil && err == nil {
select {
case err = <-errCh:
if err != nil {
return nil, err
}
details = m.tryAcquireMachineDetails(newDetails)
case <-coordinator.availableMachineSignal():
		// Even though the signal has fired and we are *almost* sure that
		// there's a machine available, use the getAvailableMachine
		// method so that the internal counter stays synchronized with what
		// we are actually doing, and so that we can be sure that no other
		// goroutine that missed the signal and used the ticker instead
		// has already snatched the machine.
details, err = m.tryGetFreeExistingMachineFromCoordinator(config, coordinator)
case <-time.After(time.Second):
details, err = m.tryGetFreeExistingMachineFromCoordinator(config, coordinator)
}
}
return details, err
}
func (m *machineProvider) tryGetFreeExistingMachineFromCoordinator(
config *common.RunnerConfig,
coordinator *runnerMachinesCoordinator,
) (*machineDetails, error) {
if coordinator.getAvailableMachine() {
return m.findFreeExistingMachine(config)
}
return nil, nil
}
func (m *machineProvider) tryAcquireMachineDetails(details *machineDetails) *machineDetails {
m.lock.Lock()
defer m.lock.Unlock()
if details.isUsed() {
return nil
}
details.State = machineStateAcquired
return details
}
func (m *machineProvider) retryUseMachine(config *common.RunnerConfig) (details *machineDetails, err error) {
// Try to find a machine
for i := 0; i < 3; i++ {
details, err = m.useMachine(config)
if err == nil {
break
}
time.Sleep(provisionRetryInterval)
}
return
}
func (m *machineProvider) removeMachine(details *machineDetails) (err error) {
if !m.machine.Exist(details.Name) {
details.logger().
Warningln("Skipping machine removal, because it doesn't exist")
return nil
}
	// Limit the removal of stuck machines to one machine per interval
if details.isStuckOnRemove() {
m.stuckRemoveLock.Lock()
defer m.stuckRemoveLock.Unlock()
}
details.logger().
Warningln("Stopping machine")
err = m.machine.Stop(details.Name, machineStopCommandTimeout)
if err != nil {
details.logger().
WithError(err).
Warningln("Error while stopping machine")
}
details.logger().
Warningln("Removing machine")
err = m.machine.Remove(details.Name)
if err != nil {
details.RetryCount++
time.Sleep(removeRetryInterval)
return err
}
return nil
}
func (m *machineProvider) finalizeRemoval(details *machineDetails) {
for {
err := m.removeMachine(details)
if err == nil {
break
}
}
m.lock.Lock()
defer m.lock.Unlock()
delete(m.details, details.Name)
details.logger().
WithField("now", time.Now()).
WithField("retries", details.RetryCount).
Infoln("Machine removed")
m.totalActions.WithLabelValues("removed").Inc()
}
func (m *machineProvider) remove(machineName string, reason ...interface{}) error {
m.lock.Lock()
defer m.lock.Unlock()
details := m.details[machineName]
if details == nil {
return errors.New("machine not found")
}
details.Reason = fmt.Sprint(reason...)
details.State = machineStateRemoving
details.RetryCount = 0
details.logger().
WithField("now", time.Now()).
Warningln("Requesting machine removal")
details.Used = time.Now()
details.writeDebugInformation()
go m.finalizeRemoval(details)
return nil
}
func (m *machineProvider) updateMachine(
config *common.RunnerConfig,
data *machinesData,
details *machineDetails,
) error {
if details.State != machineStateIdle {
return nil
}
if config.Machine.MaxBuilds > 0 && details.UsedCount >= config.Machine.MaxBuilds {
// Limit number of builds
return errors.New("too many builds")
}
if data.Total() >= config.Limit && config.Limit > 0 {
// Limit maximum number of machines
return errors.New("too many machines")
}
if time.Since(details.Used) > time.Second*time.Duration(config.Machine.GetIdleTime()) {
if data.Idle >= config.Machine.GetIdleCount() {
			// Remove machines that are over the idle time
return errors.New("too many idle machines")
}
}
return nil
}
func (m *machineProvider) updateMachines(
machines []string,
config *common.RunnerConfig,
) (data machinesData, validMachines []string) {
data.Runner = config.ShortDescription()
validMachines = make([]string, 0, len(machines))
for _, name := range machines {
details := m.machineDetails(name, false)
details.LastSeen = time.Now()
err := m.updateMachine(config, &data, details)
if err == nil {
validMachines = append(validMachines, name)
} else {
_ = m.remove(details.Name, err)
}
data.Add(details)
}
return
}
func (m *machineProvider) createMachines(config *common.RunnerConfig, data *machinesData) {
	// Create new machines and mark them as Idle
for {
if data.Available() >= config.Machine.GetIdleCount() {
// Limit maximum number of idle machines
break
}
if data.Total() >= config.Limit && config.Limit > 0 {
// Limit maximum number of machines
break
}
if data.Creating >= config.Machine.MaxGrowthRate && config.Machine.MaxGrowthRate > 0 {
// Prevent excessive growth in the number of machines
break
}
m.create(config, machineStateIdle)
data.Creating++
}
}
// intermediateMachineList returns a list of machines that might not yet be
// persisted on disk: the machines between being virtually created and
// `docker-machine create` being executed. We populate this data set to
// overcome race conditions caused by the incomplete set of machines
// returned by `docker-machine ls -q`.
func (m *machineProvider) intermediateMachineList(excludedMachines []string) []string {
var excludedSet map[string]struct{}
var intermediateMachines []string
m.lock.Lock()
defer m.lock.Unlock()
for _, details := range m.details {
if details.isPersistedOnDisk() {
continue
}
		// lazily initialize the set, as most of the time we don't create new machines
if excludedSet == nil {
excludedSet = make(map[string]struct{}, len(excludedMachines))
for _, excludedMachine := range excludedMachines {
excludedSet[excludedMachine] = struct{}{}
}
}
if _, ok := excludedSet[details.Name]; ok {
continue
}
intermediateMachines = append(intermediateMachines, details.Name)
}
return intermediateMachines
}
func (m *machineProvider) loadMachines(config *common.RunnerConfig) (machines []string, err error) {
machines, err = m.machine.List()
if err != nil {
return nil, err
}
machines = append(machines, m.intermediateMachineList(machines)...)
machines = filterMachineList(machines, machineFilter(config))
return
}
func (m *machineProvider) Acquire(config *common.RunnerConfig) (common.ExecutorData, error) {
if config.Machine == nil || config.Machine.MachineName == "" {
return nil, fmt.Errorf("missing Machine options")
}
	// Lock machine updates, because two Acquire calls can run at the same time
m.acquireLock.Lock()
defer m.acquireLock.Unlock()
machines, err := m.loadMachines(config)
if err != nil {
return nil, err
}
	// Update the list of currently configured machines
machinesData, validMachines := m.updateMachines(machines, config)
// Pre-create machines
m.createMachines(config, &machinesData)
logrus.WithFields(machinesData.Fields()).
WithField("runner", config.ShortDescription()).
WithField("minIdleCount", config.Machine.GetIdleCount()).
WithField("maxMachines", config.Limit).
WithField("maxMachineCreate", config.Machine.MaxGrowthRate).
WithField("time", time.Now()).
Debugln("Docker Machine Details")
machinesData.writeDebugInformation()
// Try to find a free machine
details := m.findFreeMachine(false, validMachines...)
if details != nil {
return details, nil
}
	// If idle machines are expected but none are free, no build can be processed
if config.Machine.GetIdleCount() != 0 && machinesData.Idle == 0 {
err = errors.New("no free machines that can process builds")
}
return nil, err
}
//nolint:nakedret
func (m *machineProvider) Use(
config *common.RunnerConfig,
data common.ExecutorData,
) (newConfig common.RunnerConfig, newData common.ExecutorData, err error) {
// Find a new machine
details, _ := data.(*machineDetails)
if details == nil || !details.canBeUsed() || !m.machine.CanConnect(details.Name, true) {
details, err = m.retryUseMachine(config)
if err != nil {
return
}
// Return details only if this is a new instance
newData = details
}
// Get machine credentials
dc, err := m.machine.Credentials(details.Name)
if err != nil {
if newData != nil {
m.Release(config, newData)
}
newData = nil
return
}
	// Create a shallow copy of the config and store the docker credentials in it
newConfig = *config
newConfig.Docker = &common.DockerConfig{}
if config.Docker != nil {
*newConfig.Docker = *config.Docker
}
newConfig.Docker.Credentials = dc
// Mark machine as used
details.State = machineStateUsed
details.Used = time.Now()
details.UsedCount++
m.totalActions.WithLabelValues("used").Inc()
return
}
func (m *machineProvider) Release(config *common.RunnerConfig, data common.ExecutorData) {
// Release machine
details, ok := data.(*machineDetails)
if !ok {
return
}
m.lock.Lock()
	// Record the last-used time when the machine is in the Used state
if details.State == machineStateUsed {
details.Used = time.Now()
}
m.lock.Unlock()
	// Remove the machine if it has already run its maximum number of builds
if config != nil && config.Machine != nil &&
config.Machine.MaxBuilds > 0 && details.UsedCount >= config.Machine.MaxBuilds {
err := m.remove(details.Name, "Too many builds")
if err == nil {
return
}
}
m.lock.Lock()
details.State = machineStateIdle
m.lock.Unlock()
// Signal pending builds that a new machine is available.
if err := m.signalRelease(config); err != nil {
return
}
}
func (m *machineProvider) signalRelease(config *common.RunnerConfig) error {
coordinator, err := m.runnerMachinesCoordinator(config)
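	// errNoConfig is tolerated here: a nil config simply means there is no coordinator to signal.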
if err != nil && err != errNoConfig {
return err
}
if err != errNoConfig && coordinator != nil {
coordinator.addAvailableMachine()
}
return nil
}
func (m *machineProvider) CanCreate() bool {
return m.provider.CanCreate()
}
func (m *machineProvider) GetFeatures(features *common.FeaturesInfo) error {
return m.provider.GetFeatures(features)
}
func (m *machineProvider) GetDefaultShell() string {
return m.provider.GetDefaultShell()
}
func (m *machineProvider) Create() common.Executor {
return &machineExecutor{
provider: m,
}
}
func newMachineProvider(name, executor string) *machineProvider | {
provider := common.GetExecutorProvider(executor)
if provider == nil {
logrus.Panicln("Missing", executor)
}
return &machineProvider{
name: name,
details: make(machinesDetails),
runners: make(runnersDetails),
machine: docker.NewMachineCommand(),
provider: provider,
totalActions: prometheus.NewCounterVec(
prometheus.CounterOpts{
Name: "gitlab_runner_autoscaling_actions_total",
Help: "The total number of actions executed by the provider.",
ConstLabels: prometheus.Labels{
"executor": name,
},
},
[]string{"action"},
),
currentStatesDesc: prometheus.NewDesc(
"gitlab_runner_autoscaling_machine_states",
"The current number of machines per state in this provider.",
[]string{"state"},
prometheus.Labels{
"executor": name,
},
),
creationHistogram: prometheus.NewHistogram(
prometheus.HistogramOpts{
Name: "gitlab_runner_autoscaling_machine_creation_duration_seconds",
Help: "Histogram of machine creation time.",
Buckets: prometheus.ExponentialBuckets(30, 1.25, 10),
ConstLabels: prometheus.Labels{
"executor": name,
},
},
),
}
} |
|
remote.rs | use std::collections::HashMap;
use std::mem::drop;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::{Duration, Instant};
use bazel_protos;
use boxfuture::{try_future, BoxFuture, Boxable};
use bytes::Bytes;
use digest::{Digest as DigestTrait, FixedOutput};
use fs::{self, File, PathStat};
use futures::{future, Future, Stream};
use grpcio;
use hashing::{Digest, Fingerprint};
use log::{debug, trace, warn};
use protobuf::{self, Message, ProtobufEnum};
use sha2::Sha256;
use store::{Snapshot, Store, StoreFileByDigest};
use time;
use time::Timespec;
use tokio_timer::Delay;
use super::{
ExecuteProcessRequest, ExecuteProcessRequestMetadata, ExecutionStats,
FallibleExecuteProcessResult,
};
use std;
use std::cmp::min;
use workunit_store::{generate_random_64bit_string, get_parent_id, WorkUnit, WorkUnitStore};
// Environment variable which is exclusively used for cache key invalidation.
// This may not be specified in an ExecuteProcessRequest, and may be populated only by the
// CommandRunner.
const CACHE_KEY_GEN_VERSION_ENV_VAR_NAME: &str = "PANTS_CACHE_KEY_GEN_VERSION";
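// (See `make_execute_request_with_cache_key_gen_version` in the tests below for how this
// variable is injected into the remote Command.)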
#[derive(Debug)]
enum OperationOrStatus {
Operation(bazel_protos::operations::Operation),
Status(bazel_protos::status::Status),
}
#[derive(Clone)]
pub struct CommandRunner {
metadata: ExecuteProcessRequestMetadata,
authorization_header: Option<String>,
channel: grpcio::Channel,
env: Arc<grpcio::Environment>,
execution_client: Arc<bazel_protos::remote_execution_grpc::ExecutionClient>,
operations_client: Arc<bazel_protos::operations_grpc::OperationsClient>,
store: Store,
}
#[derive(Debug, PartialEq)]
enum ExecutionError {
// String is the error message.
Fatal(String),
// Digests are Files and Directories which have been reported to be missing. May be incomplete.
MissingDigests(Vec<Digest>),
// String is the operation name which can be used to poll the GetOperation gRPC API.
NotFinished(String),
}
#[derive(Default)]
struct ExecutionHistory {
attempts: Vec<ExecutionStats>,
current_attempt: ExecutionStats,
}
impl CommandRunner {
// The Execute API used to be unary, and became streaming. The contract of the streaming API is
// that if the client closes the stream after one request, it should continue to function exactly
// like the unary API.
// For maximal compatibility with servers, we fall back to this unary-like behavior, and control
// our own polling rates.
// In the future, we may want to remove this behavior if servers reliably support the full stream
// behavior.
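  // Concretely: send one ExecuteRequest, take the first element of the response stream, then
  // drop the stream to disconnect and poll GetOperation ourselves (see `run` below).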
fn oneshot_execute(
&self,
execute_request: &Arc<bazel_protos::remote_execution::ExecuteRequest>,
) -> BoxFuture<OperationOrStatus, String> {
let stream = try_future!(self
.execution_client
.execute_opt(&execute_request, self.call_option())
.map_err(rpcerror_to_string));
stream
.take(1)
.into_future()
// If there was a response, drop the _stream to disconnect so that the server doesn't keep
// the connection alive and continue sending on it.
.map(|(maybe_operation, stream)| {
drop(stream);
maybe_operation
})
// If there was an error, drop the _stream to disconnect so that the server doesn't keep the
// connection alive and continue sending on it.
.map_err(|(error, stream)| {
drop(stream);
error
})
.then(|maybe_operation_result| match maybe_operation_result {
Ok(Some(operation)) => Ok(OperationOrStatus::Operation(operation)),
Ok(None) => {
Err("Didn't get proper stream response from server during remote execution".to_owned())
}
Err(err) => rpcerror_to_status_or_string(err).map(OperationOrStatus::Status),
})
.to_boxed()
}
}
// TODO(pantsbuild/pants#8039) Need to impl Drop on the command runner so that when the BoxFuture
// goes out of scope we cancel any in-flight RPC. To do that we need to distinguish local from
// remote requests and save enough state in the BoxFuture (or another abstraction around our
// execution results).
impl super::CommandRunner for CommandRunner {
///
/// Runs a command via a gRPC service implementing the Bazel Remote Execution API
/// (https://docs.google.com/document/d/1AaGk7fOPByEvpAbqeXIyE8HX_A3_axxNnvroblTZ_6s/edit).
///
/// If the CommandRunner has a Store, files will be uploaded to the remote CAS as needed.
  /// Note that it does not proactively upload files to a remote CAS. This is because if we get a
  /// cache hit, uploading the files would have wasted time and bandwidth, and if the remote CAS
  /// already has some of the files, uploading them all is a waste. Instead, we look at the
  /// responses we get back from the server, and upload the files it says it's missing.
///
/// In the future, we may want to do some clever things like proactively upload files which the
/// user has changed, or files which aren't known to the local git repository, but these are
/// optimizations to shave off a round-trip in the future.
///
/// Loops until the server gives a response, either successful or error. Does not have any
/// timeout: polls in a tight loop.
///
/// TODO: Request jdk_home be created if set.
///
fn run(
&self,
req: ExecuteProcessRequest,
workunit_store: WorkUnitStore,
) -> BoxFuture<FallibleExecuteProcessResult, String> {
let operations_client = self.operations_client.clone();
let store = self.store.clone();
let execute_request_result = make_execute_request(&req, self.metadata.clone());
let ExecuteProcessRequest {
description,
timeout,
input_files,
..
} = req;
let description2 = description.clone();
match execute_request_result {
Ok((action, command, execute_request)) => {
let command_runner = self.clone();
let command_runner2 = self.clone();
let command_runner3 = self.clone();
let execute_request = Arc::new(execute_request);
let execute_request2 = execute_request.clone();
let store2 = store.clone();
let mut history = ExecutionHistory::default();
self
.store_proto_locally(&command)
.join(self.store_proto_locally(&action))
.and_then(move |(command_digest, action_digest)| {
store2.ensure_remote_has_recursive(vec![command_digest, action_digest, input_files])
})
.and_then(move |summary| {
history.current_attempt += summary;
trace!(
"Executing remotely request: {:?} (command: {:?})",
execute_request,
command
);
command_runner
.oneshot_execute(&execute_request)
.join(future::ok(history))
})
.and_then(move |(operation, history)| {
let start_time = Instant::now();
future::loop_fn(
(history, operation, 0),
move |(mut history, operation, iter_num)| {
let description = description.clone();
let execute_request2 = execute_request2.clone();
let store = store.clone();
let operations_client = operations_client.clone();
let command_runner2 = command_runner2.clone();
let command_runner3 = command_runner3.clone();
let f = command_runner2.extract_execute_response(
operation,
&mut history,
workunit_store.clone(),
);
f.map(future::Loop::Break).or_else(move |value| {
match value {
ExecutionError::Fatal(err) => future::err(err).to_boxed(),
ExecutionError::MissingDigests(missing_digests) => {
let ExecutionHistory {
mut attempts,
current_attempt,
} = history;
trace!(
"Server reported missing digests ({:?}); trying to upload: {:?}",
current_attempt,
missing_digests,
);
attempts.push(current_attempt);
let history = ExecutionHistory {
attempts,
current_attempt: ExecutionStats::default(),
};
let execute_request = execute_request2.clone();
store
.ensure_remote_has_recursive(missing_digests)
.and_then(move |summary| {
let mut history = history;
history.current_attempt += summary;
command_runner2
.oneshot_execute(&execute_request)
.join(future::ok(history))
})
// Reset `iter_num` on `MissingDigests`
.map(|(operation, history)| future::Loop::Continue((history, operation, 0)))
.to_boxed()
}
ExecutionError::NotFinished(operation_name) => {
let mut operation_request =
bazel_protos::operations::GetOperationRequest::new();
operation_request.set_name(operation_name.clone());
let backoff_period = min(
CommandRunner::BACKOFF_MAX_WAIT_MILLIS,
(1 + iter_num) * CommandRunner::BACKOFF_INCR_WAIT_MILLIS,
);
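                    // Worked example with the constants defined below (500ms increment, 5000ms cap):
                    // iter_num 0 -> 500ms, 1 -> 1000ms, ..., 9 -> 5000ms, capped at 5000ms thereafter.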
                    // Take the gRPC result and cancel the operation if too much time has passed.
let elapsed = start_time.elapsed();
if elapsed > timeout {
future::err(format!(
"Exceeded time out of {:?} with {:?} for operation {}, {}",
timeout, elapsed, operation_name, description
))
.to_boxed()
} else {
// maybe the delay here should be the min of remaining time and the backoff period
Delay::new(Instant::now() + Duration::from_millis(backoff_period))
.map_err(move |e| {
format!(
"Future-Delay errored at operation result polling for {}, {}: {}",
operation_name, description, e
)
})
.and_then(move |_| {
future::done(
operations_client
.get_operation_opt(
&operation_request,
command_runner3.call_option(),
)
.or_else(move |err| {
rpcerror_recover_cancelled(operation_request.take_name(), err)
})
.map(OperationOrStatus::Operation)
.map_err(rpcerror_to_string),
)
.map(move |operation| {
future::Loop::Continue((history, operation, iter_num + 1))
})
.to_boxed()
})
.to_boxed()
}
}
}
})
},
)
})
.map(move |resp| {
let mut attempts = String::new();
for (i, attempt) in resp.execution_attempts.iter().enumerate() {
attempts += &format!("\nAttempt {}: {:?}", i, attempt);
}
debug!(
"Finished remote exceution of {} after {} attempts: Stats: {}",
description2,
resp.execution_attempts.len(),
attempts
);
resp
})
.to_boxed()
}
Err(err) => future::err(err).to_boxed(),
}
}
}
impl CommandRunner {
const BACKOFF_INCR_WAIT_MILLIS: u64 = 500;
const BACKOFF_MAX_WAIT_MILLIS: u64 = 5000;
pub fn new(
address: &str,
metadata: ExecuteProcessRequestMetadata,
root_ca_certs: Option<Vec<u8>>,
oauth_bearer_token: Option<String>,
store: Store,
) -> CommandRunner {
let env = Arc::new(grpcio::EnvBuilder::new().build());
let channel = {
let builder = grpcio::ChannelBuilder::new(env.clone());
if let Some(root_ca_certs) = root_ca_certs {
let creds = grpcio::ChannelCredentialsBuilder::new()
.root_cert(root_ca_certs)
.build();
builder.secure_connect(address, creds)
} else {
builder.connect(address)
}
};
let execution_client = Arc::new(bazel_protos::remote_execution_grpc::ExecutionClient::new(
channel.clone(),
));
let operations_client = Arc::new(bazel_protos::operations_grpc::OperationsClient::new(
channel.clone(),
));
CommandRunner {
metadata,
authorization_header: oauth_bearer_token.map(|t| format!("Bearer {}", t)),
channel,
env,
execution_client,
operations_client,
store,
}
}
fn call_option(&self) -> grpcio::CallOption {
let mut call_option = grpcio::CallOption::default();
if let Some(ref authorization_header) = self.authorization_header {
let mut builder = grpcio::MetadataBuilder::with_capacity(1);
builder
.add_str("authorization", &authorization_header)
.unwrap();
call_option = call_option.headers(builder.build());
}
call_option
}
fn store_proto_locally<P: protobuf::Message>(
&self,
proto: &P,
) -> impl Future<Item = Digest, Error = String> {
let store = self.store.clone();
future::done(
proto
.write_to_bytes()
.map_err(|e| format!("Error serializing proto {:?}", e)),
)
.and_then(move |command_bytes| store.store_file_bytes(Bytes::from(command_bytes), true))
.map_err(|e| format!("Error saving proto to local store: {:?}", e))
}
fn extract_execute_response(
&self,
operation_or_status: OperationOrStatus,
attempts: &mut ExecutionHistory,
workunit_store: WorkUnitStore,
) -> BoxFuture<FallibleExecuteProcessResult, ExecutionError> {
trace!("Got operation response: {:?}", operation_or_status);
let status = match operation_or_status {
OperationOrStatus::Operation(mut operation) => {
if !operation.get_done() {
return future::err(ExecutionError::NotFinished(operation.take_name())).to_boxed();
}
if operation.has_error() {
return future::err(ExecutionError::Fatal(format_error(&operation.get_error())))
.to_boxed();
}
if !operation.has_response() {
return future::err(ExecutionError::Fatal(
"Operation finished but no response supplied".to_string(),
))
.to_boxed();
}
let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new();
try_future!(execute_response
.merge_from_bytes(operation.get_response().get_value())
.map_err(|e| ExecutionError::Fatal(format!("Invalid ExecuteResponse: {:?}", e))));
trace!("Got (nested) execute response: {:?}", execute_response);
if execute_response.get_result().has_execution_metadata() {
let metadata = execute_response.get_result().get_execution_metadata();
let enqueued = timespec_from(metadata.get_queued_timestamp());
let worker_start = timespec_from(metadata.get_worker_start_timestamp());
let input_fetch_start = timespec_from(metadata.get_input_fetch_start_timestamp());
let input_fetch_completed = timespec_from(metadata.get_input_fetch_completed_timestamp());
let execution_start = timespec_from(metadata.get_execution_start_timestamp());
let execution_completed = timespec_from(metadata.get_execution_completed_timestamp());
let output_upload_start = timespec_from(metadata.get_output_upload_start_timestamp());
let output_upload_completed =
timespec_from(metadata.get_output_upload_completed_timestamp());
let parent_id = get_parent_id();
let result_cached = execute_response.get_cached_result();
match (worker_start - enqueued).to_std() {
Ok(duration) => {
attempts.current_attempt.remote_queue = Some(duration);
maybe_add_workunit(
result_cached,
"remote execution action scheduling",
enqueued,
worker_start,
parent_id.clone(),
&workunit_store,
);
}
Err(err) => warn!("Got negative remote queue time: {}", err),
}
match (input_fetch_completed - input_fetch_start).to_std() {
Ok(duration) => {
attempts.current_attempt.remote_input_fetch = Some(duration);
maybe_add_workunit(
result_cached,
"remote execution worker input fetching",
input_fetch_start,
input_fetch_completed,
parent_id.clone(),
&workunit_store,
);
}
Err(err) => warn!("Got negative remote input fetch time: {}", err),
}
match (execution_completed - execution_start).to_std() {
Ok(duration) => {
attempts.current_attempt.remote_execution = Some(duration);
maybe_add_workunit(
result_cached,
"remote execution worker command executing",
execution_start,
execution_completed,
parent_id.clone(),
&workunit_store,
);
}
Err(err) => warn!("Got negative remote execution time: {}", err),
}
match (output_upload_completed - output_upload_start).to_std() {
Ok(duration) => {
attempts.current_attempt.remote_output_store = Some(duration);
maybe_add_workunit(
result_cached,
"remote execution worker output uploading",
output_upload_start,
output_upload_completed,
parent_id,
&workunit_store,
);
}
Err(err) => warn!("Got negative remote output store time: {}", err),
}
attempts.current_attempt.was_cache_hit = execute_response.cached_result;
}
let mut execution_attempts = std::mem::replace(&mut attempts.attempts, vec![]);
execution_attempts.push(attempts.current_attempt);
let status = execute_response.take_status();
if grpcio::RpcStatusCode::from(status.get_code()) == grpcio::RpcStatusCode::Ok {
return populate_fallible_execution_result(
self.store.clone(),
execute_response,
execution_attempts,
)
.map_err(ExecutionError::Fatal)
.to_boxed();
}
status
}
OperationOrStatus::Status(status) => status,
};
match grpcio::RpcStatusCode::from(status.get_code()) {
grpcio::RpcStatusCode::Ok => unreachable!(),
grpcio::RpcStatusCode::FailedPrecondition => {
if status.get_details().len() != 1 {
return future::err(ExecutionError::Fatal(format!(
"Received multiple details in FailedPrecondition ExecuteResponse's status field: {:?}",
status.get_details()
)))
.to_boxed();
}
let details = status.get_details().get(0).unwrap();
let mut precondition_failure = bazel_protos::error_details::PreconditionFailure::new();
if details.get_type_url()
!= format!(
"type.googleapis.com/{}",
precondition_failure.descriptor().full_name()
)
{
return future::err(ExecutionError::Fatal(format!(
"Received FailedPrecondition, but didn't know how to resolve it: {},\
protobuf type {}",
status.get_message(),
details.get_type_url()
)))
.to_boxed();
}
try_future!(precondition_failure
.merge_from_bytes(details.get_value())
.map_err(|e| ExecutionError::Fatal(format!(
"Error deserializing FailedPrecondition proto: {:?}",
e
))));
let mut missing_digests = Vec::with_capacity(precondition_failure.get_violations().len());
for violation in precondition_failure.get_violations() {
if violation.get_field_type() != "MISSING" {
return future::err(ExecutionError::Fatal(format!(
"Didn't know how to process PreconditionFailure violation: {:?}",
violation
)))
.to_boxed();
}
let parts: Vec<_> = violation.get_subject().split('/').collect();
if parts.len() != 3 || parts[0] != "blobs" {
return future::err(ExecutionError::Fatal(format!(
"Received FailedPrecondition MISSING but didn't recognize subject {}",
violation.get_subject()
)))
.to_boxed();
}
let digest = Digest(
try_future!(Fingerprint::from_hex_string(parts[1]).map_err(|e| {
ExecutionError::Fatal(format!("Bad digest in missing blob: {}: {}", parts[1], e))
})),
try_future!(parts[2]
.parse::<usize>()
.map_err(|e| ExecutionError::Fatal(format!(
"Missing blob had bad size: {}: {}",
parts[2], e
)))),
);
missing_digests.push(digest);
}
if missing_digests.is_empty() {
return future::err(ExecutionError::Fatal(
"Error from remote execution: FailedPrecondition, but no details".to_owned(),
))
.to_boxed();
}
future::err(ExecutionError::MissingDigests(missing_digests)).to_boxed()
}
code => future::err(ExecutionError::Fatal(format!(
"Error from remote execution: {:?}: {:?}",
code,
status.get_message()
)))
.to_boxed(),
}
.to_boxed()
}
}
fn maybe_add_workunit(
result_cached: bool,
name: &str,
start_time: Timespec,
end_time: Timespec,
parent_id: Option<String>,
workunit_store: &WorkUnitStore,
) {
// TODO: workunits for scheduling, fetching, executing and uploading should be recorded
// only if '--reporting-zipkin-trace-v2' is set
if !result_cached {
let workunit = WorkUnit {
name: String::from(name),
start_timestamp: start_time,
end_timestamp: end_time,
span_id: generate_random_64bit_string(),
parent_id,
};
workunit_store.add_workunit(workunit);
}
}
pub fn make_execute_request(
req: &ExecuteProcessRequest,
metadata: ExecuteProcessRequestMetadata,
) -> Result<
(
bazel_protos::remote_execution::Action,
bazel_protos::remote_execution::Command,
bazel_protos::remote_execution::ExecuteRequest,
),
String,
> {
let mut command = bazel_protos::remote_execution::Command::new();
command.set_arguments(protobuf::RepeatedField::from_vec(req.argv.clone()));
for (ref name, ref value) in &req.env {
if name.as_str() == CACHE_KEY_GEN_VERSION_ENV_VAR_NAME {
return Err(format!(
"Cannot set env var with name {} as that is reserved for internal use by pants",
CACHE_KEY_GEN_VERSION_ENV_VAR_NAME
));
}
let mut env = bazel_protos::remote_execution::Command_EnvironmentVariable::new();
env.set_name(name.to_string());
env.set_value(value.to_string());
command.mut_environment_variables().push(env);
}
let ExecuteProcessRequestMetadata {
instance_name,
cache_key_gen_version,
mut platform_properties,
} = metadata;
if let Some(cache_key_gen_version) = cache_key_gen_version {
let mut env = bazel_protos::remote_execution::Command_EnvironmentVariable::new();
env.set_name(CACHE_KEY_GEN_VERSION_ENV_VAR_NAME.to_string());
env.set_value(cache_key_gen_version);
command.mut_environment_variables().push(env);
}
let mut output_files = req
.output_files
.iter()
.map(|p| {
p.to_str()
.map(str::to_owned)
.ok_or_else(|| format!("Non-UTF8 output file path: {:?}", p))
})
.collect::<Result<Vec<String>, String>>()?;
output_files.sort();
command.set_output_files(protobuf::RepeatedField::from_vec(output_files));
let mut output_directories = req
.output_directories
.iter()
.map(|p| {
p.to_str()
.map(str::to_owned)
.ok_or_else(|| format!("Non-UTF8 output directory path: {:?}", p))
})
.collect::<Result<Vec<String>, String>>()?;
output_directories.sort();
command.set_output_directories(protobuf::RepeatedField::from_vec(output_directories));
if req.jdk_home.is_some() {
// Ideally, the JDK would be brought along as part of the input directory, but we don't
// currently have support for that. Scoot supports this property, and will symlink .jdk to a
// system-installed JDK https://github.com/twitter/scoot/pull/391 - we should probably come to
// some kind of consensus across tools as to how this should work; RBE appears to work by
// allowing you to specify a jdk-version platform property, and it will put a JDK at a
// well-known path in the docker container you specify in which to run.
platform_properties.insert("JDK_SYMLINK".to_owned(), ".jdk".to_owned());
}
for (name, value) in platform_properties {
command.mut_platform().mut_properties().push({
let mut property = bazel_protos::remote_execution::Platform_Property::new();
property.set_name(name.clone());
property.set_value(value.clone());
property
});
}
let mut action = bazel_protos::remote_execution::Action::new();
action.set_command_digest((&digest(&command)?).into());
action.set_input_root_digest((&req.input_files).into());
let mut execute_request = bazel_protos::remote_execution::ExecuteRequest::new();
if let Some(instance_name) = instance_name {
execute_request.set_instance_name(instance_name);
}
execute_request.set_action_digest((&digest(&action)?).into());
Ok((action, command, execute_request))
}
pub fn populate_fallible_execution_result(
store: Store,
execute_response: bazel_protos::remote_execution::ExecuteResponse,
execution_attempts: Vec<ExecutionStats>,
) -> impl Future<Item = FallibleExecuteProcessResult, Error = String> {
extract_stdout(&store, &execute_response)
.join(extract_stderr(&store, &execute_response))
.join(extract_output_files(store, &execute_response))
.and_then(move |((stdout, stderr), output_directory)| {
      Ok(FallibleExecuteProcessResult {
        stdout,
        stderr,
        exit_code: execute_response.get_result().get_exit_code(),
        output_directory,
        execution_attempts,
      })
})
}
fn extract_stdout(
store: &Store,
execute_response: &bazel_protos::remote_execution::ExecuteResponse,
) -> BoxFuture<Bytes, String> {
if execute_response.get_result().has_stdout_digest() {
let stdout_digest_result: Result<Digest, String> =
execute_response.get_result().get_stdout_digest().into();
let stdout_digest =
try_future!(stdout_digest_result.map_err(|err| format!("Error extracting stdout: {}", err)));
store
.load_file_bytes_with(stdout_digest, |v| v)
.map_err(move |error| {
format!(
"Error fetching stdout digest ({:?}): {:?}",
stdout_digest, error
)
})
.and_then(move |maybe_value| {
maybe_value.ok_or_else(|| {
format!(
"Couldn't find stdout digest ({:?}), when fetching.",
stdout_digest
)
})
})
.map(|(bytes, _metadata)| bytes)
.to_boxed()
} else {
let stdout_raw = Bytes::from(execute_response.get_result().get_stdout_raw());
let stdout_copy = stdout_raw.clone();
store
.store_file_bytes(stdout_raw, true)
.map_err(move |error| format!("Error storing raw stdout: {:?}", error))
.map(|_| stdout_copy)
.to_boxed()
}
}
fn extract_stderr(
store: &Store,
execute_response: &bazel_protos::remote_execution::ExecuteResponse,
) -> BoxFuture<Bytes, String> {
if execute_response.get_result().has_stderr_digest() {
let stderr_digest_result: Result<Digest, String> =
execute_response.get_result().get_stderr_digest().into();
let stderr_digest =
try_future!(stderr_digest_result.map_err(|err| format!("Error extracting stderr: {}", err)));
store
.load_file_bytes_with(stderr_digest, |v| v)
.map_err(move |error| {
format!(
"Error fetching stderr digest ({:?}): {:?}",
stderr_digest, error
)
})
.and_then(move |maybe_value| {
maybe_value.ok_or_else(|| {
format!(
"Couldn't find stderr digest ({:?}), when fetching.",
stderr_digest
)
})
})
.map(|(bytes, _metadata)| bytes)
.to_boxed()
} else {
let stderr_raw = Bytes::from(execute_response.get_result().get_stderr_raw());
let stderr_copy = stderr_raw.clone();
store
.store_file_bytes(stderr_raw, true)
.map_err(move |error| format!("Error storing raw stderr: {:?}", error))
.map(|_| stderr_copy)
.to_boxed()
}
}
fn extract_output_files(
store: Store,
execute_response: &bazel_protos::remote_execution::ExecuteResponse,
) -> BoxFuture<Digest, String> {
// Get Digests of output Directories.
// Then we'll make a Directory for the output files, and merge them.
let mut directory_digests =
Vec::with_capacity(execute_response.get_result().get_output_directories().len() + 1);
// TODO: Maybe take rather than clone
let output_directories = execute_response
.get_result()
.get_output_directories()
.to_owned();
for dir in output_directories {
let digest_result: Result<Digest, String> = dir.get_tree_digest().into();
let mut digest = future::done(digest_result).to_boxed();
if !dir.get_path().is_empty() {
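      // Wrap the tree digest in one parent Directory per path component, innermost first
      // (hence rsplit), so the final digest places the output at its declared path.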
for component in dir.get_path().rsplit('/') {
let component = component.to_owned();
let store = store.clone();
digest = digest
.and_then(move |digest| {
let mut directory = bazel_protos::remote_execution::Directory::new();
directory.mut_directories().push({
let mut node = bazel_protos::remote_execution::DirectoryNode::new();
node.set_name(component);
node.set_digest((&digest).into());
node
});
store.record_directory(&directory, true)
})
.to_boxed();
}
}
directory_digests
.push(digest.map_err(|err| format!("Error saving remote output directory: {}", err)));
}
// Make a directory for the files
let mut path_map = HashMap::new();
let path_stats_result: Result<Vec<PathStat>, String> = execute_response
.get_result()
.get_output_files()
.iter()
.map(|output_file| {
let output_file_path_buf = PathBuf::from(output_file.get_path());
let digest: Result<Digest, String> = output_file.get_digest().into();
path_map.insert(output_file_path_buf.clone(), digest?);
Ok(PathStat::file(
output_file_path_buf.clone(),
File {
path: output_file_path_buf,
is_executable: output_file.get_is_executable(),
},
))
})
.collect();
let path_stats = try_future!(path_stats_result);
#[derive(Clone)]
struct StoreOneOffRemoteDigest {
map_of_paths_to_digests: HashMap<PathBuf, Digest>,
}
impl StoreOneOffRemoteDigest {
fn new(map: HashMap<PathBuf, Digest>) -> StoreOneOffRemoteDigest {
StoreOneOffRemoteDigest {
map_of_paths_to_digests: map,
}
}
}
impl StoreFileByDigest<String> for StoreOneOffRemoteDigest {
fn store_by_digest(&self, file: File) -> BoxFuture<Digest, String> {
match self.map_of_paths_to_digests.get(&file.path) {
Some(digest) => future::ok(*digest),
None => future::err(format!(
"Didn't know digest for path in remote execution response: {:?}",
file.path
)),
}
.to_boxed()
}
}
let store = store.clone();
Snapshot::digest_from_path_stats(
store.clone(),
&StoreOneOffRemoteDigest::new(path_map),
&path_stats,
)
.map_err(move |error| {
format!(
"Error when storing the output file directory info in the remote CAS: {:?}",
error
)
})
.join(future::join_all(directory_digests))
.and_then(|(files_digest, mut directory_digests)| {
directory_digests.push(files_digest);
Snapshot::merge_directories(store, directory_digests)
.map_err(|err| format!("Error when merging output files and directories: {}", err))
})
.to_boxed()
}
fn format_error(error: &bazel_protos::status::Status) -> String {
let error_code_enum = bazel_protos::code::Code::from_i32(error.get_code());
let error_code = match error_code_enum {
Some(x) => format!("{:?}", x),
None => format!("{:?}", error.get_code()),
};
format!("{}: {}", error_code, error.get_message())
}
///
/// If the given operation represents a cancelled request, recover it into
/// ExecutionError::NotFinished.
///
fn rpcerror_recover_cancelled(
operation_name: String,
err: grpcio::Error,
) -> Result<bazel_protos::operations::Operation, grpcio::Error> |
fn rpcerror_to_status_or_string(
error: grpcio::Error,
) -> Result<bazel_protos::status::Status, String> {
match error {
grpcio::Error::RpcFailure(grpcio::RpcStatus {
status_proto_bytes: Some(status_proto_bytes),
..
}) => {
let mut status_proto = bazel_protos::status::Status::new();
status_proto.merge_from_bytes(&status_proto_bytes).unwrap();
Ok(status_proto)
}
grpcio::Error::RpcFailure(grpcio::RpcStatus {
status, details, ..
}) => Err(format!(
"{:?}: {:?}",
status,
details.unwrap_or_else(|| "[no message]".to_string())
)),
err => Err(format!("{:?}", err)),
}
}
fn rpcerror_to_string(error: grpcio::Error) -> String {
match error {
grpcio::Error::RpcFailure(status) => format!(
"{:?}: {:?}",
status.status,
status.details.unwrap_or_else(|| "[no message]".to_string())
),
err => format!("{:?}", err),
}
}
fn digest(message: &dyn Message) -> Result<Digest, String> {
let bytes = message.write_to_bytes().map_err(|e| format!("{:?}", e))?;
let mut hasher = Sha256::default();
hasher.input(&bytes);
Ok(Digest(
Fingerprint::from_bytes_unsafe(&hasher.fixed_result()),
bytes.len(),
))
}
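// This is the digest used for the Command and Action protos in `make_execute_request` above:
// the SHA-256 fingerprint of the serialized message plus its length in bytes.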
fn timespec_from(timestamp: &protobuf::well_known_types::Timestamp) -> time::Timespec {
time::Timespec::new(timestamp.seconds, timestamp.nanos)
}
#[cfg(test)]
pub mod tests {
use bazel_protos;
use bazel_protos::operations::Operation;
use bazel_protos::remote_execution::ExecutedActionMetadata;
use bytes::Bytes;
use futures::Future;
use grpcio;
use hashing::{Digest, Fingerprint, EMPTY_DIGEST};
use mock;
use protobuf::{self, Message, ProtobufEnum};
use store::Store;
use tempfile::TempDir;
use testutil::data::{TestData, TestDirectory};
use testutil::{as_bytes, owned_string_vec};
use super::super::CommandRunner as CommandRunnerTrait;
use super::{
CommandRunner, ExecuteProcessRequest, ExecuteProcessRequestMetadata, ExecutionError,
ExecutionHistory, FallibleExecuteProcessResult,
};
use maplit::hashset;
use mock::execution_server::MockOperation;
use protobuf::well_known_types::Timestamp;
use std::collections::{BTreeMap, BTreeSet};
use std::iter::{self, FromIterator};
use std::ops::Sub;
use std::path::PathBuf;
use std::time::Duration;
use time::Timespec;
use workunit_store::{workunits_with_constant_span_id, WorkUnit, WorkUnitStore};
#[derive(Debug, PartialEq)]
enum StdoutType {
Raw(String),
Digest(Digest),
}
#[derive(Debug, PartialEq)]
enum StderrType {
Raw(String),
Digest(Digest),
}
#[test]
fn make_execute_request() {
let input_directory = TestDirectory::containing_roland();
let req = ExecuteProcessRequest {
argv: owned_string_vec(&["/bin/echo", "yo"]),
env: vec![("SOME".to_owned(), "value".to_owned())]
.into_iter()
.collect(),
input_files: input_directory.digest(),
// Intentionally poorly sorted:
output_files: vec!["path/to/file", "other/file"]
.into_iter()
.map(PathBuf::from)
.collect(),
output_directories: vec!["directory/name"]
.into_iter()
.map(PathBuf::from)
.collect(),
timeout: Duration::from_millis(1000),
description: "some description".to_owned(),
jdk_home: None,
};
let mut want_command = bazel_protos::remote_execution::Command::new();
want_command.mut_arguments().push("/bin/echo".to_owned());
want_command.mut_arguments().push("yo".to_owned());
want_command.mut_environment_variables().push({
let mut env = bazel_protos::remote_execution::Command_EnvironmentVariable::new();
env.set_name("SOME".to_owned());
env.set_value("value".to_owned());
env
});
want_command
.mut_output_files()
.push("other/file".to_owned());
want_command
.mut_output_files()
.push("path/to/file".to_owned());
want_command
.mut_output_directories()
.push("directory/name".to_owned());
let mut want_action = bazel_protos::remote_execution::Action::new();
want_action.set_command_digest(
(&Digest(
Fingerprint::from_hex_string(
"cc4ddd3085aaffbe0abce22f53b30edbb59896bb4a4f0d76219e48070cd0afe1",
)
.unwrap(),
72,
))
.into(),
);
want_action.set_input_root_digest((&input_directory.digest()).into());
let mut want_execute_request = bazel_protos::remote_execution::ExecuteRequest::new();
want_execute_request.set_action_digest(
(&Digest(
Fingerprint::from_hex_string(
"844c929423444f3392e0dcc89ebf1febbfdf3a2e2fcab7567cc474705a5385e4",
)
.unwrap(),
140,
))
.into(),
);
assert_eq!(
super::make_execute_request(&req, empty_request_metadata()),
Ok((want_action, want_command, want_execute_request))
);
}
#[test]
fn make_execute_request_with_instance_name() {
let input_directory = TestDirectory::containing_roland();
let req = ExecuteProcessRequest {
argv: owned_string_vec(&["/bin/echo", "yo"]),
env: vec![("SOME".to_owned(), "value".to_owned())]
.into_iter()
.collect(),
input_files: input_directory.digest(),
// Intentionally poorly sorted:
output_files: vec!["path/to/file", "other/file"]
.into_iter()
.map(PathBuf::from)
.collect(),
output_directories: vec!["directory/name"]
.into_iter()
.map(PathBuf::from)
.collect(),
timeout: Duration::from_millis(1000),
description: "some description".to_owned(),
jdk_home: None,
};
let mut want_command = bazel_protos::remote_execution::Command::new();
want_command.mut_arguments().push("/bin/echo".to_owned());
want_command.mut_arguments().push("yo".to_owned());
want_command.mut_environment_variables().push({
let mut env = bazel_protos::remote_execution::Command_EnvironmentVariable::new();
env.set_name("SOME".to_owned());
env.set_value("value".to_owned());
env
});
want_command
.mut_output_files()
.push("other/file".to_owned());
want_command
.mut_output_files()
.push("path/to/file".to_owned());
want_command
.mut_output_directories()
.push("directory/name".to_owned());
let mut want_action = bazel_protos::remote_execution::Action::new();
want_action.set_command_digest(
(&Digest(
Fingerprint::from_hex_string(
"cc4ddd3085aaffbe0abce22f53b30edbb59896bb4a4f0d76219e48070cd0afe1",
)
.unwrap(),
72,
))
.into(),
);
want_action.set_input_root_digest((&input_directory.digest()).into());
let mut want_execute_request = bazel_protos::remote_execution::ExecuteRequest::new();
want_execute_request.set_instance_name("dark-tower".to_owned());
want_execute_request.set_action_digest(
(&Digest(
Fingerprint::from_hex_string(
"844c929423444f3392e0dcc89ebf1febbfdf3a2e2fcab7567cc474705a5385e4",
)
.unwrap(),
140,
))
.into(),
);
assert_eq!(
super::make_execute_request(
&req,
ExecuteProcessRequestMetadata {
instance_name: Some("dark-tower".to_owned()),
cache_key_gen_version: None,
platform_properties: BTreeMap::new(),
}
),
Ok((want_action, want_command, want_execute_request))
);
}
#[test]
fn make_execute_request_with_cache_key_gen_version() {
let input_directory = TestDirectory::containing_roland();
let req = ExecuteProcessRequest {
argv: owned_string_vec(&["/bin/echo", "yo"]),
env: vec![("SOME".to_owned(), "value".to_owned())]
.into_iter()
.collect(),
input_files: input_directory.digest(),
// Intentionally poorly sorted:
output_files: vec!["path/to/file", "other/file"]
.into_iter()
.map(PathBuf::from)
.collect(),
output_directories: vec!["directory/name"]
.into_iter()
.map(PathBuf::from)
.collect(),
timeout: Duration::from_millis(1000),
description: "some description".to_owned(),
jdk_home: None,
};
let mut want_command = bazel_protos::remote_execution::Command::new();
want_command.mut_arguments().push("/bin/echo".to_owned());
want_command.mut_arguments().push("yo".to_owned());
want_command.mut_environment_variables().push({
let mut env = bazel_protos::remote_execution::Command_EnvironmentVariable::new();
env.set_name("SOME".to_owned());
env.set_value("value".to_owned());
env
});
want_command.mut_environment_variables().push({
let mut env = bazel_protos::remote_execution::Command_EnvironmentVariable::new();
env.set_name(super::CACHE_KEY_GEN_VERSION_ENV_VAR_NAME.to_owned());
env.set_value("meep".to_owned());
env
});
want_command
.mut_output_files()
.push("other/file".to_owned());
want_command
.mut_output_files()
.push("path/to/file".to_owned());
want_command
.mut_output_directories()
.push("directory/name".to_owned());
let mut want_action = bazel_protos::remote_execution::Action::new();
want_action.set_command_digest(
(&Digest(
Fingerprint::from_hex_string(
"1a95e3482dd235593df73dc12b808ec7d922733a40d97d8233c1a32c8610a56d",
)
.unwrap(),
109,
))
.into(),
);
want_action.set_input_root_digest((&input_directory.digest()).into());
let mut want_execute_request = bazel_protos::remote_execution::ExecuteRequest::new();
want_execute_request.set_action_digest(
(&Digest(
Fingerprint::from_hex_string(
"0ee5d4c8ac12513a87c8d949c6883ac533a264d30215126af71a9028c4ab6edf",
)
.unwrap(),
140,
))
.into(),
);
assert_eq!(
super::make_execute_request(
&req,
ExecuteProcessRequestMetadata {
instance_name: None,
cache_key_gen_version: Some("meep".to_owned()),
platform_properties: BTreeMap::new(),
}
),
Ok((want_action, want_command, want_execute_request))
);
}
#[test]
fn make_execute_request_with_jdk() {
let input_directory = TestDirectory::containing_roland();
let req = ExecuteProcessRequest {
argv: owned_string_vec(&["/bin/echo", "yo"]),
env: BTreeMap::new(),
input_files: input_directory.digest(),
output_files: BTreeSet::new(),
output_directories: BTreeSet::new(),
timeout: Duration::from_millis(1000),
description: "some description".to_owned(),
jdk_home: Some(PathBuf::from("/tmp")),
};
let mut want_command = bazel_protos::remote_execution::Command::new();
want_command.mut_arguments().push("/bin/echo".to_owned());
want_command.mut_arguments().push("yo".to_owned());
want_command.mut_platform().mut_properties().push({
let mut property = bazel_protos::remote_execution::Platform_Property::new();
property.set_name("JDK_SYMLINK".to_owned());
property.set_value(".jdk".to_owned());
property
});
let mut want_action = bazel_protos::remote_execution::Action::new();
want_action.set_command_digest(
(&Digest(
Fingerprint::from_hex_string(
"f373f421b328ddeedfba63542845c0423d7730f428dd8e916ec6a38243c98448",
)
.unwrap(),
38,
))
.into(),
);
want_action.set_input_root_digest((&input_directory.digest()).into());
let mut want_execute_request = bazel_protos::remote_execution::ExecuteRequest::new();
want_execute_request.set_action_digest(
(&Digest(
Fingerprint::from_hex_string(
"b1fb7179ce496995a4e3636544ec000dca1b951f1f6216493f6c7608dc4dd910",
)
.unwrap(),
140,
))
.into(),
);
assert_eq!(
super::make_execute_request(&req, empty_request_metadata()),
Ok((want_action, want_command, want_execute_request))
);
}
#[test]
fn make_execute_request_with_jdk_and_extra_platform_properties() {
let input_directory = TestDirectory::containing_roland();
let req = ExecuteProcessRequest {
argv: owned_string_vec(&["/bin/echo", "yo"]),
env: BTreeMap::new(),
input_files: input_directory.digest(),
output_files: BTreeSet::new(),
output_directories: BTreeSet::new(),
timeout: Duration::from_millis(1000),
description: "some description".to_owned(),
jdk_home: Some(PathBuf::from("/tmp")),
};
let mut want_command = bazel_protos::remote_execution::Command::new();
want_command.mut_arguments().push("/bin/echo".to_owned());
want_command.mut_arguments().push("yo".to_owned());
want_command.mut_platform().mut_properties().push({
let mut property = bazel_protos::remote_execution::Platform_Property::new();
property.set_name("FIRST".to_owned());
property.set_value("foo".to_owned());
property
});
want_command.mut_platform().mut_properties().push({
let mut property = bazel_protos::remote_execution::Platform_Property::new();
property.set_name("JDK_SYMLINK".to_owned());
property.set_value(".jdk".to_owned());
property
});
want_command.mut_platform().mut_properties().push({
let mut property = bazel_protos::remote_execution::Platform_Property::new();
property.set_name("last".to_owned());
property.set_value("bar".to_owned());
property
});
let mut want_action = bazel_protos::remote_execution::Action::new();
want_action.set_command_digest(
(&Digest(
Fingerprint::from_hex_string(
"a809e7c54a105e7d98cc61558ac13ca3c05a5e1cb33326dfde189c72887dac29",
)
.unwrap(),
65,
))
.into(),
);
want_action.set_input_root_digest((&input_directory.digest()).into());
let mut want_execute_request = bazel_protos::remote_execution::ExecuteRequest::new();
want_execute_request.set_action_digest(
(&Digest(
Fingerprint::from_hex_string(
"3d8d2a0282cb45b365b338f80ddab039dfa461dadde053e12bd5c3ab3329d928",
)
.unwrap(),
140,
))
.into(),
);
assert_eq!(
super::make_execute_request(
&req,
ExecuteProcessRequestMetadata {
instance_name: None,
cache_key_gen_version: None,
platform_properties: vec![
("FIRST".to_owned(), "foo".to_owned()),
("last".to_owned(), "bar".to_owned())
]
.into_iter()
.collect()
},
),
Ok((want_action, want_command, want_execute_request))
);
}
#[test]
fn server_rejecting_execute_request_gives_error() {
let execute_request = echo_foo_request();
let mock_server = {
mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new(
"wrong-command".to_string(),
super::make_execute_request(
&ExecuteProcessRequest {
argv: owned_string_vec(&["/bin/echo", "-n", "bar"]),
env: BTreeMap::new(),
input_files: EMPTY_DIGEST,
output_files: BTreeSet::new(),
output_directories: BTreeSet::new(),
timeout: Duration::from_millis(1000),
description: "wrong command".to_string(),
jdk_home: None,
},
empty_request_metadata(),
)
.unwrap()
.2,
vec![],
))
};
let error = run_command_remote(mock_server.address(), execute_request).expect_err("Want Err");
assert_eq!(
error,
"InvalidArgument: \"Did not expect this request\"".to_string()
);
}
#[test]
fn successful_execution_after_one_getoperation() {
let execute_request = echo_foo_request();
let mock_server = {
let op_name = "gimme-foo".to_string();
mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new(
op_name.clone(),
super::make_execute_request(&execute_request, empty_request_metadata())
.unwrap()
.2,
vec![
make_incomplete_operation(&op_name),
make_successful_operation(
&op_name,
StdoutType::Raw("foo".to_owned()),
StderrType::Raw("".to_owned()),
0,
),
],
))
};
let result = run_command_remote(mock_server.address(), execute_request).unwrap();
assert_eq!(
result.without_execution_attempts(),
FallibleExecuteProcessResult {
stdout: as_bytes("foo"),
stderr: as_bytes(""),
exit_code: 0,
output_directory: EMPTY_DIGEST,
execution_attempts: vec![],
}
);
}
#[test]
fn extract_response_with_digest_stdout() {
let op_name = "gimme-foo".to_string();
let testdata = TestData::roland();
let testdata_empty = TestData::empty();
assert_eq!(
extract_execute_response(
make_successful_operation(
&op_name,
StdoutType::Digest(testdata.digest()),
StderrType::Raw(testdata_empty.string()),
0,
)
.op
.unwrap()
.unwrap()
)
.unwrap()
.without_execution_attempts(),
FallibleExecuteProcessResult {
stdout: testdata.bytes(),
stderr: testdata_empty.bytes(),
exit_code: 0,
output_directory: EMPTY_DIGEST,
execution_attempts: vec![],
}
);
}
#[test]
fn extract_response_with_digest_stderr() {
let op_name = "gimme-foo".to_string();
let testdata = TestData::roland();
let testdata_empty = TestData::empty();
assert_eq!(
extract_execute_response(
make_successful_operation(
&op_name,
StdoutType::Raw(testdata_empty.string()),
StderrType::Digest(testdata.digest()),
0,
)
.op
.unwrap()
.unwrap()
)
.unwrap()
.without_execution_attempts(),
FallibleExecuteProcessResult {
stdout: testdata_empty.bytes(),
stderr: testdata.bytes(),
exit_code: 0,
output_directory: EMPTY_DIGEST,
execution_attempts: vec![],
}
);
}
#[test]
fn ensure_inline_stdio_is_stored() {
let runtime = task_executor::Executor::new();
let test_stdout = TestData::roland();
let test_stderr = TestData::catnip();
let mock_server = {
let op_name = "cat".to_owned();
mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new(
op_name.clone(),
super::make_execute_request(&echo_roland_request(), empty_request_metadata())
.unwrap()
.2,
vec![make_successful_operation(
&op_name.clone(),
StdoutType::Raw(test_stdout.string()),
StderrType::Raw(test_stderr.string()),
0,
)],
))
};
let store_dir = TempDir::new().unwrap();
let store_dir_path = store_dir.path();
let cas = mock::StubCAS::empty();
let store = Store::with_remote(
runtime.clone(),
&store_dir_path,
&[cas.address()],
None,
&None,
None,
1,
10 * 1024 * 1024,
Duration::from_secs(1),
store::BackoffConfig::new(Duration::from_millis(10), 1.0, Duration::from_millis(10)).unwrap(),
1,
)
.expect("Failed to make store");
let cmd_runner = CommandRunner::new(
&mock_server.address(),
empty_request_metadata(),
None,
None,
store,
);
let result = runtime
.block_on(cmd_runner.run(echo_roland_request(), WorkUnitStore::new()))
.unwrap();
assert_eq!(
result.without_execution_attempts(),
FallibleExecuteProcessResult {
stdout: test_stdout.bytes(),
stderr: test_stderr.bytes(),
exit_code: 0,
output_directory: EMPTY_DIGEST,
execution_attempts: vec![],
}
);
let local_store =
Store::local_only(runtime.clone(), &store_dir_path).expect("Error creating local store");
{
assert_eq!(
runtime
.block_on(local_store.load_file_bytes_with(test_stdout.digest(), |v| v))
.unwrap()
.unwrap()
.0,
test_stdout.bytes()
);
assert_eq!(
runtime
.block_on(local_store.load_file_bytes_with(test_stderr.digest(), |v| v))
.unwrap()
.unwrap()
.0,
test_stderr.bytes()
);
}
}
#[test]
fn successful_execution_after_four_getoperations() {
let execute_request = echo_foo_request();
let mock_server = {
let op_name = "gimme-foo".to_string();
mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new(
op_name.clone(),
super::make_execute_request(&execute_request, empty_request_metadata())
.unwrap()
.2,
Vec::from_iter(
iter::repeat(make_incomplete_operation(&op_name))
.take(4)
.chain(iter::once(make_successful_operation(
&op_name,
StdoutType::Raw("foo".to_owned()),
StderrType::Raw("".to_owned()),
0,
))),
),
))
};
let result = run_command_remote(mock_server.address(), execute_request).unwrap();
assert_eq!(
result.without_execution_attempts(),
FallibleExecuteProcessResult {
stdout: as_bytes("foo"),
stderr: as_bytes(""),
exit_code: 0,
output_directory: EMPTY_DIGEST,
execution_attempts: vec![],
}
);
}
#[test]
fn timeout_after_sufficiently_delayed_getoperations() {
let request_timeout = Duration::new(4, 0);
let delayed_operation_time = Duration::new(5, 0);
let execute_request = ExecuteProcessRequest {
argv: owned_string_vec(&["/bin/echo", "-n", "foo"]),
env: BTreeMap::new(),
input_files: EMPTY_DIGEST,
output_files: BTreeSet::new(),
output_directories: BTreeSet::new(),
timeout: request_timeout,
description: "echo-a-foo".to_string(),
jdk_home: None,
};
let mock_server = {
let op_name = "gimme-foo".to_string();
mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new(
op_name.clone(),
super::make_execute_request(&execute_request, empty_request_metadata())
.unwrap()
.2,
vec![
make_incomplete_operation(&op_name),
make_delayed_incomplete_operation(&op_name, delayed_operation_time),
],
))
};
let error_msg = run_command_remote(mock_server.address(), execute_request)
.expect_err("Timeout did not cause failure.");
assert_contains(&error_msg, "Exceeded time out");
assert_contains(&error_msg, "echo-a-foo");
}
#[test]
fn retry_for_cancelled_channel() {
let execute_request = echo_foo_request();
let mock_server = {
let op_name = "gimme-foo".to_string();
mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new(
op_name.clone(),
super::make_execute_request(&execute_request, empty_request_metadata())
.unwrap()
.2,
vec![
make_incomplete_operation(&op_name),
make_canceled_operation(Some(Duration::from_millis(100))),
make_successful_operation(
&op_name,
StdoutType::Raw("foo".to_owned()),
StderrType::Raw("".to_owned()),
0,
),
],
))
};
let result = run_command_remote(mock_server.address(), execute_request).unwrap();
assert_eq!(
result.without_execution_attempts(),
FallibleExecuteProcessResult {
stdout: as_bytes("foo"),
stderr: as_bytes(""),
exit_code: 0,
output_directory: EMPTY_DIGEST,
execution_attempts: vec![],
}
);
}
#[test]
fn bad_result_bytes() {
let execute_request = echo_foo_request();
let mock_server = {
let op_name = "gimme-foo".to_string();
mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new(
op_name.clone(),
super::make_execute_request(&execute_request, empty_request_metadata())
.unwrap()
.2,
vec![
make_incomplete_operation(&op_name),
MockOperation::new({
let mut op = bazel_protos::operations::Operation::new();
op.set_name(op_name.clone());
op.set_done(true);
op.set_response({
let mut response_wrapper = protobuf::well_known_types::Any::new();
response_wrapper.set_type_url(format!(
"type.googleapis.com/{}",
bazel_protos::remote_execution::ExecuteResponse::new()
.descriptor()
.full_name()
));
response_wrapper.set_value(vec![0x00, 0x00, 0x00]);
response_wrapper
});
op
}),
],
))
};
run_command_remote(mock_server.address(), execute_request).expect_err("Want Err");
}
#[test]
fn initial_response_error() {
let execute_request = echo_foo_request();
let mock_server = {
let op_name = "gimme-foo".to_string();
mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new(
op_name.clone(),
super::make_execute_request(&execute_request, empty_request_metadata())
.unwrap()
.2,
vec![MockOperation::new({
let mut op = bazel_protos::operations::Operation::new();
op.set_name(op_name.to_string());
op.set_done(true);
op.set_error({
let mut error = bazel_protos::status::Status::new();
error.set_code(bazel_protos::code::Code::INTERNAL.value());
error.set_message("Something went wrong".to_string());
error
});
op
})],
))
};
let result = run_command_remote(mock_server.address(), execute_request).expect_err("Want Err");
assert_eq!(result, "INTERNAL: Something went wrong");
}
#[test]
fn getoperation_response_error() {
let execute_request = echo_foo_request();
let mock_server = {
let op_name = "gimme-foo".to_string();
mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new(
op_name.clone(),
super::make_execute_request(&execute_request, empty_request_metadata())
.unwrap()
.2,
vec![
make_incomplete_operation(&op_name),
MockOperation::new({
let mut op = bazel_protos::operations::Operation::new();
op.set_name(op_name.to_string());
op.set_done(true);
op.set_error({
let mut error = bazel_protos::status::Status::new();
error.set_code(bazel_protos::code::Code::INTERNAL.value());
error.set_message("Something went wrong".to_string());
error
});
op
}),
],
))
};
let result = run_command_remote(mock_server.address(), execute_request).expect_err("Want Err");
assert_eq!(result, "INTERNAL: Something went wrong");
}
#[test]
fn initial_response_missing_response_and_error() {
let execute_request = echo_foo_request();
let mock_server = {
let op_name = "gimme-foo".to_string();
mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new(
op_name.clone(),
super::make_execute_request(&execute_request, empty_request_metadata())
.unwrap()
.2,
vec![MockOperation::new({
let mut op = bazel_protos::operations::Operation::new();
op.set_name(op_name.to_string());
op.set_done(true);
op
})],
))
};
let result = run_command_remote(mock_server.address(), execute_request).expect_err("Want Err");
assert_eq!(result, "Operation finished but no response supplied");
}
#[test]
fn getoperation_missing_response_and_error() {
let execute_request = echo_foo_request();
let mock_server = {
let op_name = "gimme-foo".to_string();
mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new(
op_name.clone(),
super::make_execute_request(&execute_request, empty_request_metadata())
.unwrap()
.2,
vec![
make_incomplete_operation(&op_name),
MockOperation::new({
let mut op = bazel_protos::operations::Operation::new();
op.set_name(op_name.to_string());
op.set_done(true);
op
}),
],
))
};
let result = run_command_remote(mock_server.address(), execute_request).expect_err("Want Err");
assert_eq!(result, "Operation finished but no response supplied");
}
#[test]
fn execute_missing_file_uploads_if_known() {
let runtime = task_executor::Executor::new();
let roland = TestData::roland();
let mock_server = {
let op_name = "cat".to_owned();
mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new(
op_name.clone(),
super::make_execute_request(&cat_roland_request(), empty_request_metadata())
.unwrap()
.2,
vec![
make_incomplete_operation(&op_name),
make_precondition_failure_operation(vec![missing_preconditionfailure_violation(
&roland.digest(),
)]),
make_successful_operation(
"cat2",
StdoutType::Raw(roland.string()),
StderrType::Raw("".to_owned()),
0,
),
],
))
};
let store_dir = TempDir::new().unwrap();
let cas = mock::StubCAS::builder()
.directory(&TestDirectory::containing_roland())
.build();
let store = Store::with_remote(
runtime.clone(),
store_dir,
&[cas.address()],
None,
&None,
None,
1,
10 * 1024 * 1024,
Duration::from_secs(1),
store::BackoffConfig::new(Duration::from_millis(10), 1.0, Duration::from_millis(10)).unwrap(),
1,
)
.expect("Failed to make store");
runtime
.block_on(store.store_file_bytes(roland.bytes(), false))
.expect("Saving file bytes to store");
runtime
.block_on(store.record_directory(&TestDirectory::containing_roland().directory(), false))
.expect("Saving directory bytes to store");
let command_runner = CommandRunner::new(
&mock_server.address(),
empty_request_metadata(),
None,
None,
store,
);
let result = runtime
.block_on(command_runner.run(cat_roland_request(), WorkUnitStore::new()))
.unwrap();
assert_eq!(
result.without_execution_attempts(),
FallibleExecuteProcessResult {
stdout: roland.bytes(),
stderr: Bytes::from(""),
exit_code: 0,
output_directory: EMPTY_DIGEST,
execution_attempts: vec![],
}
);
{
let blobs = cas.blobs.lock();
assert_eq!(blobs.get(&roland.fingerprint()), Some(&roland.bytes()));
}
}
//#[test] // TODO: Unignore this test when the server can actually fail with status protos.
// See https://github.com/pantsbuild/pants/issues/6597
#[allow(dead_code)]
fn execute_missing_file_uploads_if_known_status() {
let roland = TestData::roland();
let mock_server = {
let op_name = "cat".to_owned();
let status = grpcio::RpcStatus {
status: grpcio::RpcStatusCode::FailedPrecondition,
details: None,
status_proto_bytes: Some(
make_precondition_failure_status(vec![missing_preconditionfailure_violation(
&roland.digest(),
)])
.write_to_bytes()
.unwrap(),
),
};
mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new(
op_name.clone(),
super::make_execute_request(&cat_roland_request(), empty_request_metadata())
.unwrap()
.2,
vec![
//make_incomplete_operation(&op_name),
MockOperation {
op: Err(status),
duration: None,
},
make_successful_operation(
"cat2",
StdoutType::Raw(roland.string()),
StderrType::Raw("".to_owned()),
0,
),
],
))
};
let store_dir = TempDir::new().unwrap();
let cas = mock::StubCAS::builder()
.directory(&TestDirectory::containing_roland())
.build();
let store = Store::with_remote(
task_executor::Executor::new(),
store_dir,
&[cas.address()],
None,
&None,
None,
1,
10 * 1024 * 1024,
Duration::from_secs(1),
store::BackoffConfig::new(Duration::from_millis(10), 1.0, Duration::from_millis(10)).unwrap(),
1,
)
.expect("Failed to make store");
store
.store_file_bytes(roland.bytes(), false)
.wait()
.expect("Saving file bytes to store");
let result = CommandRunner::new(
&mock_server.address(),
empty_request_metadata(),
None,
None,
store,
)
.run(cat_roland_request(), WorkUnitStore::new())
.wait();
assert_eq!(
result,
Ok(FallibleExecuteProcessResult {
stdout: roland.bytes(),
stderr: Bytes::from(""),
exit_code: 0,
output_directory: EMPTY_DIGEST,
execution_attempts: vec![],
})
);
{
let blobs = cas.blobs.lock();
assert_eq!(blobs.get(&roland.fingerprint()), Some(&roland.bytes()));
}
}
#[test]
fn execute_missing_file_errors_if_unknown() {
let missing_digest = TestDirectory::containing_roland().digest();
let mock_server = {
let op_name = "cat".to_owned();
mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new(
op_name.clone(),
super::make_execute_request(&cat_roland_request(), empty_request_metadata())
.unwrap()
.2,
// We won't get as far as trying to run the operation, so don't expect any requests whose
// responses we would need to stub.
vec![],
))
};
let store_dir = TempDir::new().unwrap();
let cas = mock::StubCAS::builder()
.file(&TestData::roland())
.directory(&TestDirectory::containing_roland())
.build();
let runtime = task_executor::Executor::new();
let store = Store::with_remote(
runtime.clone(),
store_dir,
&[cas.address()],
None,
&None,
None,
1,
10 * 1024 * 1024,
Duration::from_secs(1),
store::BackoffConfig::new(Duration::from_millis(10), 1.0, Duration::from_millis(10)).unwrap(),
1,
)
.expect("Failed to make store");
let runner = CommandRunner::new(
&mock_server.address(),
empty_request_metadata(),
None,
None,
store,
);
let error = runtime
.block_on(runner.run(cat_roland_request(), WorkUnitStore::new()))
.expect_err("Want error");
assert_contains(&error, &format!("{}", missing_digest.0));
}
#[test]
fn format_error_complete() {
let mut error = bazel_protos::status::Status::new();
error.set_code(bazel_protos::code::Code::CANCELLED.value());
error.set_message("Oops, oh well!".to_string());
assert_eq!(
super::format_error(&error),
"CANCELLED: Oops, oh well!".to_string()
);
}
#[test]
fn extract_execute_response_unknown_code() {
let mut error = bazel_protos::status::Status::new();
error.set_code(555);
error.set_message("Oops, oh well!".to_string());
assert_eq!(
super::format_error(&error),
"555: Oops, oh well!".to_string()
);
}
#[test]
fn extract_execute_response_success() {
let want_result = FallibleExecuteProcessResult {
stdout: as_bytes("roland"),
stderr: Bytes::from("simba"),
exit_code: 17,
output_directory: TestDirectory::nested().digest(),
execution_attempts: vec![],
};
let mut output_file = bazel_protos::remote_execution::OutputFile::new();
output_file.set_path("cats/roland".into());
output_file.set_digest((&TestData::roland().digest()).into());
output_file.set_is_executable(false);
let mut output_files = protobuf::RepeatedField::new();
output_files.push(output_file);
let mut operation = bazel_protos::operations::Operation::new();
operation.set_name("cat".to_owned());
operation.set_done(true);
operation.set_response(make_any_proto(&{
let mut response = bazel_protos::remote_execution::ExecuteResponse::new();
response.set_result({
let mut result = bazel_protos::remote_execution::ActionResult::new();
result.set_exit_code(want_result.exit_code);
result.set_stdout_raw(Bytes::from(want_result.stdout.clone()));
result.set_stderr_raw(Bytes::from(want_result.stderr.clone()));
result.set_output_files(output_files);
result
});
response
}));
assert_eq!(
extract_execute_response(operation)
.unwrap()
.without_execution_attempts(),
want_result
);
}
#[test]
fn extract_execute_response_pending() {
let operation_name = "cat".to_owned();
let mut operation = bazel_protos::operations::Operation::new();
operation.set_name(operation_name.clone());
operation.set_done(false);
assert_eq!(
extract_execute_response(operation),
Err(ExecutionError::NotFinished(operation_name))
);
}
#[test]
fn extract_execute_response_missing_digests() {
let missing_files = vec![
TestData::roland().digest(),
TestDirectory::containing_roland().digest(),
];
let missing = missing_files
.iter()
.map(missing_preconditionfailure_violation)
.collect();
let operation = make_precondition_failure_operation(missing)
.op
.unwrap()
.unwrap();
assert_eq!(
extract_execute_response(operation),
Err(ExecutionError::MissingDigests(missing_files))
);
}
#[test]
fn extract_execute_response_missing_other_things() {
let missing = vec![
missing_preconditionfailure_violation(&TestData::roland().digest()),
{
let mut violation = bazel_protos::error_details::PreconditionFailure_Violation::new();
violation.set_field_type("MISSING".to_owned());
violation.set_subject("monkeys".to_owned());
violation
},
];
let operation = make_precondition_failure_operation(missing)
.op
.unwrap()
.unwrap();
match extract_execute_response(operation) {
Err(ExecutionError::Fatal(err)) => assert_contains(&err, "monkeys"),
other => assert!(false, "Want fatal error, got {:?}", other),
};
}
#[test]
fn extract_execute_response_other_failed_precondition() {
let missing = vec![{
let mut violation = bazel_protos::error_details::PreconditionFailure_Violation::new();
violation.set_field_type("OUT_OF_CAPACITY".to_owned());
violation
}];
let operation = make_precondition_failure_operation(missing)
.op
.unwrap()
.unwrap();
match extract_execute_response(operation) {
Err(ExecutionError::Fatal(err)) => assert_contains(&err, "OUT_OF_CAPACITY"),
other => assert!(false, "Want fatal error, got {:?}", other),
};
}
#[test]
fn extract_execute_response_missing_without_list() {
let missing = vec![];
let operation = make_precondition_failure_operation(missing)
.op
.unwrap()
.unwrap();
match extract_execute_response(operation) {
Err(ExecutionError::Fatal(err)) => assert_contains(&err.to_lowercase(), "precondition"),
other => assert!(false, "Want fatal error, got {:?}", other),
};
}
#[test]
fn extract_execute_response_other_status() {
let mut operation = bazel_protos::operations::Operation::new();
operation.set_name("cat".to_owned());
operation.set_done(true);
operation.set_response(make_any_proto(&{
let mut response = bazel_protos::remote_execution::ExecuteResponse::new();
response.set_status({
let mut status = bazel_protos::status::Status::new();
status.set_code(grpcio::RpcStatusCode::PermissionDenied as i32);
status
});
response
}));
match extract_execute_response(operation) {
Err(ExecutionError::Fatal(err)) => assert_contains(&err, "PermissionDenied"),
other => assert!(false, "Want fatal error, got {:?}", other),
};
}
#[test]
fn digest_command() {
let mut command = bazel_protos::remote_execution::Command::new();
command.mut_arguments().push("/bin/echo".to_string());
command.mut_arguments().push("foo".to_string());
let mut env1 = bazel_protos::remote_execution::Command_EnvironmentVariable::new();
env1.set_name("A".to_string());
env1.set_value("a".to_string());
command.mut_environment_variables().push(env1);
let mut env2 = bazel_protos::remote_execution::Command_EnvironmentVariable::new();
env2.set_name("B".to_string());
env2.set_value("b".to_string());
command.mut_environment_variables().push(env2);
let digest = super::digest(&command).unwrap();
assert_eq!(
&digest.0.to_hex(),
"a32cd427e5df6a998199266681692989f56c19cabd1cc637bdd56ae2e62619b4"
);
assert_eq!(digest.1, 32)
}
#[test]
fn wait_between_request_1_retry() {
// Wait at least 500 ms before the single retry.
{
let execute_request = echo_foo_request();
let mock_server = {
let op_name = "gimme-foo".to_string();
mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new(
op_name.clone(),
super::make_execute_request(&execute_request, empty_request_metadata())
.unwrap()
.2,
vec![
make_incomplete_operation(&op_name),
make_successful_operation(
&op_name,
StdoutType::Raw("foo".to_owned()),
StderrType::Raw("".to_owned()),
0,
),
],
))
};
run_command_remote(mock_server.address(), execute_request).unwrap();
let messages = mock_server.mock_responder.received_messages.lock();
assert!(messages.len() == 2);
assert!(
messages
.get(1)
.unwrap()
.received_at
.sub(messages.get(0).unwrap().received_at)
>= Duration::from_millis(500)
);
}
}
#[test]
fn wait_between_request_3_retry() {
// Wait at least 500 + 1000 + 1500 = 3000 ms across the three retries.
{
let execute_request = echo_foo_request();
let mock_server = {
let op_name = "gimme-foo".to_string();
mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new(
op_name.clone(),
super::make_execute_request(&execute_request, empty_request_metadata())
.unwrap()
.2,
vec![
make_incomplete_operation(&op_name),
make_incomplete_operation(&op_name),
make_incomplete_operation(&op_name),
make_successful_operation(
&op_name,
StdoutType::Raw("foo".to_owned()),
StderrType::Raw("".to_owned()),
0,
),
],
))
};
run_command_remote(mock_server.address(), execute_request).unwrap();
let messages = mock_server.mock_responder.received_messages.lock();
assert!(messages.len() == 4);
assert!(
messages
.get(1)
.unwrap()
.received_at
.sub(messages.get(0).unwrap().received_at)
>= Duration::from_millis(500)
);
assert!(
messages
.get(2)
.unwrap()
.received_at
.sub(messages.get(1).unwrap().received_at)
>= Duration::from_millis(1000)
);
assert!(
messages
.get(3)
.unwrap()
.received_at
.sub(messages.get(2).unwrap().received_at)
>= Duration::from_millis(1500)
);
}
}
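// The two tests above pin down the client's polling backoff schedule: each
// successive GetOperation retry is asserted to wait another 500 ms longer than
// the one before it. A minimal sketch of that arithmetic (the 500 ms step is
// inferred from these assertions, not read from the client's configuration):
#[allow(dead_code)]
fn min_total_backoff_ms(retries: u64) -> u64 {
// Assuming the n-th retry backs off n * 500 ms, k retries wait at least
// 500 * k * (k + 1) / 2 ms in total (e.g. 500 + 1000 + 1500 = 3000 for k = 3).
(1..=retries).map(|n| n * 500).sum()
}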
#[test]
fn extract_output_files_from_response_one_file() {
let mut output_file = bazel_protos::remote_execution::OutputFile::new();
output_file.set_path("roland".into());
output_file.set_digest((&TestData::roland().digest()).into());
output_file.set_is_executable(false);
let mut output_files = protobuf::RepeatedField::new();
output_files.push(output_file);
let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new();
execute_response.set_result({
let mut result = bazel_protos::remote_execution::ActionResult::new();
result.set_exit_code(0);
result.set_output_files(output_files);
result
});
assert_eq!(
extract_output_files_from_response(&execute_response),
Ok(TestDirectory::containing_roland().digest())
)
}
#[test]
fn extract_output_files_from_response_two_files_not_nested() {
let mut output_file_1 = bazel_protos::remote_execution::OutputFile::new();
output_file_1.set_path("roland".into());
output_file_1.set_digest((&TestData::roland().digest()).into());
output_file_1.set_is_executable(false);
let mut output_file_2 = bazel_protos::remote_execution::OutputFile::new();
output_file_2.set_path("treats".into());
output_file_2.set_digest((&TestData::catnip().digest()).into());
output_file_2.set_is_executable(false);
let mut output_files = protobuf::RepeatedField::new();
output_files.push(output_file_1);
output_files.push(output_file_2);
let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new();
execute_response.set_result({
let mut result = bazel_protos::remote_execution::ActionResult::new();
result.set_exit_code(0);
result.set_output_files(output_files);
result
});
assert_eq!(
extract_output_files_from_response(&execute_response),
Ok(TestDirectory::containing_roland_and_treats().digest())
)
}
#[test]
fn extract_output_files_from_response_two_files_nested() {
let mut output_file_1 = bazel_protos::remote_execution::OutputFile::new();
output_file_1.set_path("cats/roland".into());
output_file_1.set_digest((&TestData::roland().digest()).into());
output_file_1.set_is_executable(false);
let mut output_file_2 = bazel_protos::remote_execution::OutputFile::new();
output_file_2.set_path("treats".into());
output_file_2.set_digest((&TestData::catnip().digest()).into());
output_file_2.set_is_executable(false);
let mut output_files = protobuf::RepeatedField::new();
output_files.push(output_file_1);
output_files.push(output_file_2);
let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new();
execute_response.set_result({
let mut result = bazel_protos::remote_execution::ActionResult::new();
result.set_exit_code(0);
result.set_output_files(output_files);
result
});
assert_eq!(
extract_output_files_from_response(&execute_response),
Ok(TestDirectory::recursive().digest())
)
}
#[test]
fn extract_output_files_from_response_just_directory() {
let mut output_directory = bazel_protos::remote_execution::OutputDirectory::new();
output_directory.set_path("cats".into());
output_directory.set_tree_digest((&TestDirectory::containing_roland().digest()).into());
let mut output_directories = protobuf::RepeatedField::new();
output_directories.push(output_directory);
let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new();
execute_response.set_result({
let mut result = bazel_protos::remote_execution::ActionResult::new();
result.set_exit_code(0);
result.set_output_directories(output_directories);
result
});
assert_eq!(
extract_output_files_from_response(&execute_response),
Ok(TestDirectory::nested().digest())
)
}
#[test]
fn extract_output_files_from_response_directories_and_files() {
// /catnip
// /pets/cats/roland
// /pets/dogs/robin
let mut output_directories = protobuf::RepeatedField::new();
output_directories.push({
let mut output_directory = bazel_protos::remote_execution::OutputDirectory::new();
output_directory.set_path("pets/cats".into());
output_directory.set_tree_digest((&TestDirectory::containing_roland().digest()).into());
output_directory
});
output_directories.push({
let mut output_directory = bazel_protos::remote_execution::OutputDirectory::new();
output_directory.set_path("pets/dogs".into());
output_directory.set_tree_digest((&TestDirectory::containing_robin().digest()).into());
output_directory
});
let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new();
execute_response.set_result({
let mut result = bazel_protos::remote_execution::ActionResult::new();
result.set_exit_code(0);
result.set_output_directories(output_directories);
result.set_output_files({
let mut output_files = protobuf::RepeatedField::new();
output_files.push({
let mut output_file = bazel_protos::remote_execution::OutputFile::new();
output_file.set_path("treats".into());
output_file.set_digest((&TestData::catnip().digest()).into());
output_file
});
output_files
});
result
});
assert_eq!(
extract_output_files_from_response(&execute_response),
Ok(Digest(
Fingerprint::from_hex_string(
"639b4b84bb58a9353d49df8122e7987baf038efe54ed035e67910846c865b1e2"
)
.unwrap(),
159
))
)
}
#[test]
fn extract_output_files_from_response_no_prefix() {
let mut output_directory = bazel_protos::remote_execution::OutputDirectory::new();
output_directory.set_path(String::new());
output_directory.set_tree_digest((&TestDirectory::containing_roland().digest()).into());
let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new();
execute_response.set_result({
let mut result = bazel_protos::remote_execution::ActionResult::new();
result.set_exit_code(0);
result.mut_output_directories().push(output_directory);
result
});
assert_eq!(
extract_output_files_from_response(&execute_response),
Ok(TestDirectory::containing_roland().digest())
)
}
#[test]
fn remote_workunits_are_stored() {
let workunit_store = WorkUnitStore::new();
let op_name = "gimme-foo".to_string();
let testdata = TestData::roland();
let testdata_empty = TestData::empty();
let operation = make_successful_operation_with_metadata(
&op_name,
StdoutType::Digest(testdata.digest()),
StderrType::Raw(testdata_empty.string()),
0,
);
let cas = mock::StubCAS::builder()
.file(&TestData::roland())
.directory(&TestDirectory::containing_roland())
.build();
let command_runner = create_command_runner("".to_owned(), &cas);
let mut runtime = tokio::runtime::Runtime::new().unwrap();
let workunit_store_2 = workunit_store.clone();
runtime
.block_on(futures::future::lazy(move || {
command_runner.extract_execute_response(
super::OperationOrStatus::Operation(operation),
&mut ExecutionHistory::default(),
workunit_store_2,
)
}))
.unwrap();
let got_workunits = workunits_with_constant_span_id(&workunit_store);
let want_workunits = hashset! {
WorkUnit {
name: String::from("remote execution action scheduling"),
start_timestamp: Timespec::new(0, 0),
end_timestamp: Timespec::new(1, 0),
span_id: String::from("ignore"),
parent_id: None,
},
WorkUnit {
name: String::from("remote execution worker input fetching"),
start_timestamp: Timespec::new(2, 0),
end_timestamp: Timespec::new(3, 0),
span_id: String::from("ignore"),
parent_id: None,
},
WorkUnit {
name: String::from("remote execution worker command executing"),
start_timestamp: Timespec::new(4, 0),
end_timestamp: Timespec::new(5, 0),
span_id: String::from("ignore"),
parent_id: None,
},
WorkUnit {
name: String::from("remote execution worker output uploading"),
start_timestamp: Timespec::new(6, 0),
end_timestamp: Timespec::new(7, 0),
span_id: String::from("ignore"),
parent_id: None,
}
};
assert_eq!(got_workunits, want_workunits);
}
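/// Request fixture used by most tests here: echoes "foo" with a generous 5 s timeout.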
pub fn echo_foo_request() -> ExecuteProcessRequest {
ExecuteProcessRequest {
argv: owned_string_vec(&["/bin/echo", "-n", "foo"]),
env: BTreeMap::new(),
input_files: EMPTY_DIGEST,
output_files: BTreeSet::new(),
output_directories: BTreeSet::new(),
timeout: Duration::from_millis(5000),
description: "echo a foo".to_string(),
jdk_home: None,
}
}
fn make_canceled_operation(duration: Option<Duration>) -> MockOperation {
MockOperation {
op: Ok(None),
duration,
}
}
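/// A mock `Operation` that is not yet done, forcing the client to poll `GetOperation` again.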
fn make_incomplete_operation(operation_name: &str) -> MockOperation {
let mut op = bazel_protos::operations::Operation::new();
op.set_name(operation_name.to_string());
op.set_done(false);
MockOperation::new(op)
}
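/// Like `make_incomplete_operation`, but the mock server waits `delay` before replying.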
fn make_delayed_incomplete_operation(operation_name: &str, delay: Duration) -> MockOperation {
let mut op = bazel_protos::operations::Operation::new();
op.set_name(operation_name.to_string());
op.set_done(false);
MockOperation {
op: Ok(Some(op)),
duration: Some(delay),
}
}
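/// A completed `Operation` whose `ExecuteResponse` carries the given stdout/stderr
/// (raw or by digest), exit code, and optional execution metadata.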
fn make_successful_operation_with_maybe_metadata(
operation_name: &str,
stdout: StdoutType,
stderr: StderrType,
exit_code: i32,
metadata: Option<ExecutedActionMetadata>,
) -> Operation {
let mut op = bazel_protos::operations::Operation::new();
op.set_name(operation_name.to_string());
op.set_done(true);
op.set_response({
let mut response_proto = bazel_protos::remote_execution::ExecuteResponse::new();
response_proto.set_result({
let mut action_result = bazel_protos::remote_execution::ActionResult::new();
match stdout {
StdoutType::Raw(stdout_raw) => {
action_result.set_stdout_raw(Bytes::from(stdout_raw));
}
StdoutType::Digest(stdout_digest) => {
action_result.set_stdout_digest((&stdout_digest).into());
}
}
match stderr {
StderrType::Raw(stderr_raw) => {
action_result.set_stderr_raw(Bytes::from(stderr_raw));
}
StderrType::Digest(stderr_digest) => {
action_result.set_stderr_digest((&stderr_digest).into());
}
}
action_result.set_exit_code(exit_code);
if let Some(metadata) = metadata {
action_result.set_execution_metadata(metadata);
};
action_result
});
let mut response_wrapper = protobuf::well_known_types::Any::new();
response_wrapper.set_type_url(format!(
"type.googleapis.com/{}",
response_proto.descriptor().full_name()
));
let response_proto_bytes = response_proto.write_to_bytes().unwrap();
response_wrapper.set_value(response_proto_bytes);
response_wrapper
});
op
}
fn make_successful_operation(
operation_name: &str,
stdout: StdoutType,
stderr: StderrType,
exit_code: i32,
) -> MockOperation {
let op = make_successful_operation_with_maybe_metadata(
operation_name,
stdout,
stderr,
exit_code,
None,
);
MockOperation::new(op)
}
fn make_successful_operation_with_metadata(
operation_name: &str,
stdout: StdoutType,
stderr: StderrType,
exit_code: i32,
) -> Operation {
let mut metadata = ExecutedActionMetadata::new();
metadata.set_queued_timestamp(timestamp_only_secs(0));
metadata.set_worker_start_timestamp(timestamp_only_secs(1));
metadata.set_input_fetch_start_timestamp(timestamp_only_secs(2));
metadata.set_input_fetch_completed_timestamp(timestamp_only_secs(3));
metadata.set_execution_start_timestamp(timestamp_only_secs(4));
metadata.set_execution_completed_timestamp(timestamp_only_secs(5));
metadata.set_output_upload_start_timestamp(timestamp_only_secs(6));
metadata.set_output_upload_completed_timestamp(timestamp_only_secs(7));
metadata.set_worker_completed_timestamp(timestamp_only_secs(8));
make_successful_operation_with_maybe_metadata(
operation_name,
stdout,
stderr,
exit_code,
Some(metadata),
)
}
fn timestamp_only_secs(v: i64) -> Timestamp {
let mut dummy_timestamp = Timestamp::new();
dummy_timestamp.set_seconds(v);
dummy_timestamp
}
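/// Wraps `violations` in a FAILED_PRECONDITION status inside a completed operation's
/// `ExecuteResponse`.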
fn make_precondition_failure_operation(
violations: Vec<bazel_protos::error_details::PreconditionFailure_Violation>,
) -> MockOperation {
let mut operation = bazel_protos::operations::Operation::new();
operation.set_name("cat".to_owned());
operation.set_done(true);
operation.set_response(make_any_proto(&{
let mut response = bazel_protos::remote_execution::ExecuteResponse::new();
response.set_status(make_precondition_failure_status(violations));
response
}));
MockOperation::new(operation)
}
fn make_precondition_failure_status(
violations: Vec<bazel_protos::error_details::PreconditionFailure_Violation>,
) -> bazel_protos::status::Status {
let mut status = bazel_protos::status::Status::new();
status.set_code(grpcio::RpcStatusCode::FailedPrecondition as i32);
status.mut_details().push(make_any_proto(&{
let mut precondition_failure = bazel_protos::error_details::PreconditionFailure::new();
for violation in violations.into_iter() {
precondition_failure.mut_violations().push(violation);
}
precondition_failure
}));
status
}
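/// Runs `request` against a `CommandRunner` pointed at `address`, backed by a StubCAS
/// pre-seeded with the roland test file and directory.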
fn run_command_remote(
address: String,
request: ExecuteProcessRequest,
) -> Result<FallibleExecuteProcessResult, String> {
let cas = mock::StubCAS::builder()
.file(&TestData::roland())
.directory(&TestDirectory::containing_roland())
.build();
let command_runner = create_command_runner(address, &cas);
let mut runtime = tokio::runtime::Runtime::new().unwrap();
runtime.block_on(command_runner.run(request, WorkUnitStore::new()))
}
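/// Builds a `CommandRunner` whose `Store` writes to a throwaway temp directory and
/// fetches from the given StubCAS.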
fn create_command_runner(address: String, cas: &mock::StubCAS) -> CommandRunner {
let store_dir = TempDir::new().unwrap();
let store = Store::with_remote(
task_executor::Executor::new(),
store_dir,
&[cas.address()],
None,
&None,
None,
1,
10 * 1024 * 1024,
Duration::from_secs(1),
store::BackoffConfig::new(Duration::from_millis(10), 1.0, Duration::from_millis(10)).unwrap(),
1,
)
.expect("Failed to make store");
CommandRunner::new(&address, empty_request_metadata(), None, None, store)
}
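/// Drives `CommandRunner::extract_execute_response` on a fresh tokio runtime with an
/// empty `ExecutionHistory`.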
fn extract_execute_response(
operation: bazel_protos::operations::Operation,
) -> Result<FallibleExecuteProcessResult, ExecutionError> {
let cas = mock::StubCAS::builder()
.file(&TestData::roland())
.directory(&TestDirectory::containing_roland())
.build();
let command_runner = create_command_runner("".to_owned(), &cas);
let mut runtime = tokio::runtime::Runtime::new().unwrap();
runtime.block_on(command_runner.extract_execute_response(
super::OperationOrStatus::Operation(operation),
&mut ExecutionHistory::default(),
WorkUnitStore::new(),
))
}
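/// Collapses the output files and directories of `execute_response` into a single
/// `Digest` via `extract_output_files`.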
fn extract_output_files_from_response(
execute_response: &bazel_protos::remote_execution::ExecuteResponse,
) -> Result<Digest, String> {
let cas = mock::StubCAS::builder()
.file(&TestData::roland())
.directory(&TestDirectory::containing_roland())
.build();
let command_runner = create_command_runner("".to_owned(), &cas);
let mut runtime = tokio::runtime::Runtime::new().unwrap();
runtime.block_on(super::extract_output_files(
command_runner.store.clone(),
&execute_response,
))
}
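/// Packs `message` into a `google.protobuf.Any` with the standard
/// `type.googleapis.com` type URL.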
fn make_any_proto(message: &dyn Message) -> protobuf::well_known_types::Any {
let mut any = protobuf::well_known_types::Any::new();
any.set_type_url(format!(
"type.googleapis.com/{}",
message.descriptor().full_name()
));
any.set_value(message.write_to_bytes().expect("Error serializing proto"));
any
}
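/// The `MISSING` violation (subject `blobs/<fingerprint>/<size>`) these tests use to
/// report an absent blob.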
fn missing_preconditionfailure_violation(
digest: &Digest,
) -> bazel_protos::error_details::PreconditionFailure_Violation {
{
let mut violation = bazel_protos::error_details::PreconditionFailure_Violation::new();
violation.set_field_type("MISSING".to_owned());
violation.set_subject(format!("blobs/{}/{}", digest.0, digest.1));
violation
}
}
fn assert_contains(haystack: &str, needle: &str) {
assert!(
haystack.contains(needle),
"{:?} should contain {:?}",
haystack,
needle
)
}
fn cat_roland_request() -> ExecuteProcessRequest {
ExecuteProcessRequest {
argv: owned_string_vec(&["/bin/cat", "roland"]),
env: BTreeMap::new(),
input_files: TestDirectory::containing_roland().digest(),
output_files: BTreeSet::new(),
output_directories: BTreeSet::new(),
timeout: Duration::from_millis(1000),
description: "cat a roland".to_string(),
jdk_home: None,
}
}
fn echo_roland_request() -> ExecuteProcessRequest {
ExecuteProcessRequest {
argv: owned_string_vec(&["/bin/echo", "meoooow"]),
env: BTreeMap::new(),
input_files: EMPTY_DIGEST,
output_files: BTreeSet::new(),
output_directories: BTreeSet::new(),
timeout: Duration::from_millis(1000),
description: "unleash a roaring meow".to_string(),
jdk_home: None,
}
}
fn empty_request_metadata() -> ExecuteProcessRequestMetadata {
ExecuteProcessRequestMetadata {
instance_name: None,
cache_key_gen_version: None,
platform_properties: BTreeMap::new(),
}
}
}
{
// If the error represented cancellation, return an Operation for the given Operation name.
match &err {
&grpcio::Error::RpcFailure(ref rs) if rs.status == grpcio::RpcStatusCode::Cancelled => {
let mut next_operation = bazel_protos::operations::Operation::new();
next_operation.set_name(operation_name);
return Ok(next_operation);
}
_ => {}
}
// Did not represent cancellation.
Err(err)
}
state.ts | import { Pref, City } from '@/types/resas'
export type GovType = 'prefecture' | 'city'
export type StateType = {
code: string
statField: string
routingPath: string
prefList: Pref[]
cityList: City[]
selectedPref?: Pref
selectedCity?: City
govType: GovType
}
export type RouteParams = {
code: string
statField: string
menuTitleId: string
}
knex-helpers.ts | export function createOnUpdateTrigger(table: string) {
return `CREATE TRIGGER ${table}_updated_at
BEFORE UPDATE ON ${table}
FOR EACH ROW
EXECUTE PROCEDURE on_update_timestamp();`;
}
export function dropOnUpdateTrigger(table: string) {
return `DROP TRIGGER ${table}_updated_at ON ${table};`;
}
|
docs-reindex_9a4d5e41c52c20635d1fd9c6e13f6c7a_test.go | // Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V. licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information.
//
// Code generated, DO NOT EDIT
package elasticsearch_test
import (
"fmt"
"os"
"strings"
"testing"
"github.com/elastic/go-elasticsearch/v8"
)
var (
_ = fmt.Printf
_ = os.Stdout
_ = elasticsearch.NewDefaultClient
)
// <https://github.com/elastic/elasticsearch/blob/master/docs/reference/docs/reindex.asciidoc#L751>
//
// --------------------------------------------------------------------------------
// PUT metricbeat-2016.05.30/_doc/1?refresh
// {"system.cpu.idle.pct": 0.908}
// PUT metricbeat-2016.05.31/_doc/1?refresh
// {"system.cpu.idle.pct": 0.105}
// --------------------------------------------------------------------------------
func Test_docs_reindex_9a4d5e41c52c20635d1fd9c6e13f6c7a(t *testing.T) {
es, _ := elasticsearch.NewDefaultClient()
// tag:9a4d5e41c52c20635d1fd9c6e13f6c7a[]
{
res, err := es.Index(
"metricbeat-2016.05.30",
strings.NewReader(`{
"system.cpu.idle.pct": 0.908
}`),
es.Index.WithDocumentID("1"),
es.Index.WithRefresh("true"),
es.Index.WithPretty(),
)
fmt.Println(res, err)
if err != nil { // SKIP
t.Fatalf("Error getting the response: %s", err) // SKIP
} // SKIP
defer res.Body.Close() // SKIP
}
{
res, err := es.Index(
"metricbeat-2016.05.31",
strings.NewReader(`{
"system.cpu.idle.pct": 0.105
}`),
es.Index.WithDocumentID("1"),
es.Index.WithRefresh("true"),
es.Index.WithPretty(),
)
fmt.Println(res, err)
if err != nil { // SKIP
t.Fatalf("Error getting the response: %s", err) // SKIP
} // SKIP
defer res.Body.Close() // SKIP
}
// end:9a4d5e41c52c20635d1fd9c6e13f6c7a[]
}
main.rs | //! Displays a shaded sphere to the user.
use amethyst::{
animation::*,
assets::{DefaultLoader, Handle, Loader, LoaderBundle, ProcessingQueue, ProgressCounter},
core::transform::{Transform, TransformBundle},
input::{get_key, is_close_requested, is_key_down, ElementState, VirtualKeyCode},
prelude::*,
renderer::{
light::{Light, PointLight},
loaders::load_from_linear_rgba,
palette::{LinSrgba, Srgb},
plugins::{RenderPbr3D, RenderToWindow},
rendy::{
hal::command::ClearColor,
mesh::{Normal, Position, Tangent, TexCoord},
},
shape::Shape,
types::{DefaultBackend, MeshData, TextureData},
Camera, Material, MaterialDefaults, Mesh, RenderingBundle, Texture,
},
utils::application_root_dir,
window::ScreenDimensions,
};
use serde::{Deserialize, Serialize};
const CLEAR_COLOR: ClearColor = ClearColor {
float32: [0.0, 0.0, 0.0, 1.0],
};
#[derive(Eq, PartialOrd, PartialEq, Hash, Debug, Copy, Clone, Deserialize, Serialize)]
enum AnimationId {
Scale,
Rotate,
Translate,
Test,
}
struct Example {
pub sphere: Option<Entity>,
rate: f32,
current_animation: AnimationId,
pub progress_counter: Option<ProgressCounter>,
}
impl Default for Example {
fn default() -> Self {
Example {
sphere: None,
rate: 1.0,
current_animation: AnimationId::Test,
progress_counter: Some(ProgressCounter::default()),
}
}
}
impl SimpleState for Example {
fn on_start(&mut self, data: StateData<'_, GameData>) {
let StateData {
world, resources, ..
} = data;
let mut transform = Transform::default();
transform.set_translation_xyz(0.0, 0.0, -4.0);
transform.prepend_rotation_y_axis(std::f32::consts::PI);
let (width, height) = {
let dim = resources.get::<ScreenDimensions>().unwrap();
(dim.width(), dim.height())
};
world.extend(vec![(Camera::standard_3d(width, height), transform)]);
let loader = resources.get::<DefaultLoader>().unwrap();
// Add a sphere
let mesh: Handle<Mesh> = loader.load_from_data::<Mesh, (), MeshData>(
Shape::Sphere(64, 64)
.generate::<(Vec<Position>, Vec<Normal>, Vec<Tangent>, Vec<TexCoord>)>(None)
.into(),
(),
&resources.get().unwrap(),
);
let albedo = loader.load_from_data::<Texture, (), TextureData>(
load_from_linear_rgba(LinSrgba::new(1.0, 1.0, 1.0, 0.5)).into(),
(),
&resources.get().unwrap(),
);
let mtl: Handle<Material> = {
let mat_defaults = resources.get::<MaterialDefaults>().unwrap().0.clone();
loader.load_from_data(
Material {
albedo,
..mat_defaults
},
(),
&resources.get().unwrap(),
)
};
// light it up
let light1: Light = PointLight {
intensity: 6.0,
color: Srgb::new(0.8, 0.0, 0.0),
..PointLight::default()
}
.into();
let mut light1_transform = Transform::default();
light1_transform.set_translation_xyz(6.0, 6.0, -6.0);
let light2: Light = PointLight {
intensity: 5.0,
color: Srgb::new(0.0, 0.3, 0.7),
..PointLight::default()
}
.into();
let mut light2_transform = Transform::default();
light2_transform.set_translation_xyz(6.0, -6.0, -6.0);
world.extend(vec![(light1, light1_transform), (light2, light2_transform)]);
// make it dance
let sampler = loader.load_from_data(
Sampler::<SamplerPrimitive<f32>> {
input: vec![0., 1., 2.],
output: vec![
SamplerPrimitive::Vec3([0., 0., 0.]),
SamplerPrimitive::Vec3([2., 3., 0.]),
SamplerPrimitive::Vec3([1., 2., 3.]),
],
function: InterpolationFunction::Linear,
},
(),
&resources
.get::<ProcessingQueue<Sampler<SamplerPrimitive<f32>>>>()
.expect("ProcessingQueue for Sampler"),
);
let animation = loader.load_from_data(
Animation::<Transform>::new_single(0, TransformChannel::Translation, sampler),
(),
&resources.get().unwrap(),
);
let mut animation_set: AnimationSet<AnimationId, Transform> = AnimationSet::new();
animation_set.insert(AnimationId::Test, animation);
self.sphere = Some(world.push((Transform::default(), mesh, mtl, animation_set)));
}
fn handle_event(&mut self, data: StateData<'_, GameData>, event: StateEvent) -> SimpleTrans {
let StateData {
world, resources, ..
} = data;
let mut buffer = CommandBuffer::new(world);
if let StateEvent::Window(event) = &event {
if is_close_requested(&event) || is_key_down(&event, VirtualKeyCode::Escape) {
return Trans::Quit;
}
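// Keyboard controls: Space toggles the currently selected animation; D queues
// translate -> rotate -> scale as a deferred chain; Left/Right step the current
// animation backward/forward; F/V/H set the playback rate to 1.0 / 0.0 / 0.5;
// R/S/T select which animation Space controls.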
match get_key(&event) {
Some((VirtualKeyCode::Space, ElementState::Pressed)) => {
add_animation(
world,
resources,
self.sphere.unwrap(),
self.current_animation,
self.rate,
None,
true,
);
}
Some((VirtualKeyCode::D, ElementState::Pressed)) => {
add_animation(
world,
resources,
self.sphere.unwrap(),
AnimationId::Translate,
self.rate,
None,
false,
);
add_animation(
world,
resources,
self.sphere.unwrap(),
AnimationId::Rotate,
self.rate,
Some((AnimationId::Translate, DeferStartRelation::End)),
false,
);
add_animation(
world,
resources,
self.sphere.unwrap(),
AnimationId::Scale,
self.rate,
Some((AnimationId::Rotate, DeferStartRelation::Start(0.666))),
false,
);
}
Some((VirtualKeyCode::Left, ElementState::Pressed)) => {
get_animation_set::<AnimationId, Transform, World>(
world,
&mut buffer,
self.sphere.unwrap(),
)
.unwrap()
.step(&self.current_animation, StepDirection::Backward);
}
Some((VirtualKeyCode::Right, ElementState::Pressed)) => {
get_animation_set::<AnimationId, Transform, World>(
world,
&mut buffer,
self.sphere.unwrap(),
)
.unwrap()
.step(&self.current_animation, StepDirection::Forward);
}
Some((VirtualKeyCode::F, ElementState::Pressed)) => {
self.rate = 1.0;
get_animation_set::<AnimationId, Transform, World>(
world,
&mut buffer,
self.sphere.unwrap(),
)
.unwrap()
.set_rate(&self.current_animation, self.rate);
}
Some((VirtualKeyCode::V, ElementState::Pressed)) => {
self.rate = 0.0;
get_animation_set::<AnimationId, Transform, World>(
world,
&mut buffer,
self.sphere.unwrap(),
)
.unwrap()
.set_rate(&self.current_animation, self.rate);
}
Some((VirtualKeyCode::H, ElementState::Pressed)) => {
self.rate = 0.5;
get_animation_set::<AnimationId, Transform, World>(
world,
&mut buffer,
self.sphere.unwrap(),
)
.unwrap()
.set_rate(&self.current_animation, self.rate);
}
Some((VirtualKeyCode::R, ElementState::Pressed)) => {
self.current_animation = AnimationId::Rotate;
}
Some((VirtualKeyCode::S, ElementState::Pressed)) => {
self.current_animation = AnimationId::Scale;
}
Some((VirtualKeyCode::T, ElementState::Pressed)) => {
self.current_animation = AnimationId::Translate;
}
_ => {}
};
}
buffer.flush(world, resources);
Trans::None
}
fn update(&mut self, data: &mut StateData<'_, GameData>) -> SimpleTrans {
let StateData {
world, resources, ..
} = data;
let mut query = <(Entity, Read<AnimationSet<AnimationId, Transform>>)>::query();
let mut buffer = CommandBuffer::new(world);
if let Some(ref progress_counter) = self.progress_counter {
// Checks progress
if progress_counter.is_complete() {
let (query_world, mut subworld) = world.split_for_query(&query);
for (entity, animation_set) in query.iter(&query_world) {
// Creates a new AnimationControlSet for the entity
if let Some(control_set) =
get_animation_set(&mut subworld, &mut buffer, *entity)
{
if control_set.is_empty() {
// Adds the `Fly` animation to AnimationControlSet and loops infinitely
control_set.add_animation(
AnimationId::Test,
&animation_set.get(&AnimationId::Test).unwrap(),
EndControl::Loop(None),
1.0,
AnimationCommand::Start,
);
self.progress_counter = None;
}
}
}
}
}
buffer.flush(world, resources);
Trans::None
}
}
fn main() -> amethyst::Result<()> {
amethyst::Logger::from_config(amethyst::LoggerConfig {
level_filter: log::LevelFilter::Error,
..Default::default()
})
.start();
let app_root = application_root_dir()?;
let display_config_path = app_root.join("config/display.ron");
let assets_dir = app_root.join("assets/");
let mut game_data = DispatcherBuilder::default();
game_data
.add_bundle(LoaderBundle)
.add_bundle(AnimationBundle::<AnimationId, Transform>::default())
.add_bundle(TransformBundle::default())
.add_bundle(
RenderingBundle::<DefaultBackend>::new()
.with_plugin(
RenderToWindow::from_config_path(display_config_path)?.with_clear(CLEAR_COLOR),
)
.with_plugin(RenderPbr3D::default()),
);
let state: Example = Default::default();
let game = Application::build(assets_dir, state)?.build(game_data)?;
game.run();
Ok(())
}
fn add_animation(
world: &mut World,
resources: &mut Resources,
entity: Entity,
id: AnimationId,
rate: f32,
defer: Option<(AnimationId, DeferStartRelation)>,
toggle_if_exists: bool,
) {
let animation = {
let entry = world.entry_ref(entity).unwrap();
let set = entry
.get_component::<AnimationSet<AnimationId, Transform>>()
.expect("AnimationSet for Entity");
set.get(&id).cloned()
};
if let Some(animation) = animation {
let mut buffer = CommandBuffer::new(world);
let control_set =
get_animation_set::<AnimationId, Transform, World>(world, &mut buffer, entity).unwrap();
match defer {
None => {
if toggle_if_exists && control_set.has_animation(&id) {
control_set.toggle(&id);
} else {
control_set.add_animation(
id,
&animation,
EndControl::Normal,
rate,
AnimationCommand::Start,
);
}
}
Some((defer_id, defer_relation)) => {
control_set.add_deferred_animation(
id,
&animation,
EndControl::Normal,
rate,
AnimationCommand::Start,
defer_id,
defer_relation,
);
}
}
buffer.flush(world, resources);
}
}
generate_test.go | //
// Copyright 2021-2022 Red Hat, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package gitops
import (
"reflect"
"testing"
routev1 "github.com/openshift/api/route/v1"
appstudiov1alpha1 "github.com/redhat-appstudio/application-service/api/v1alpha1"
appsv1 "k8s.io/api/apps/v1"
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/api/resource"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/util/intstr"
)
func TestGenerateDeployment(t *testing.T) {
applicationName := "test-application"
componentName := "test-component"
namespace := "test-namespace"
replicas := int32(1)
otherReplicas := int32(3)
k8slabels := map[string]string{
"app.kubernetes.io/name": componentName,
"app.kubernetes.io/instance": componentName,
"app.kubernetes.io/part-of": applicationName,
"app.kubernetes.io/managed-by": "kustomize",
"app.kubernetes.io/created-by": "application-service",
}
matchLabels := map[string]string{
"app.kubernetes.io/instance": componentName,
}
tests := []struct {
name string
component appstudiov1alpha1.Component
wantDeployment appsv1.Deployment
}{
{
name: "Simple component, no optional fields set",
component: appstudiov1alpha1.Component{
ObjectMeta: v1.ObjectMeta{
Name: componentName,
Namespace: namespace,
},
Spec: appstudiov1alpha1.ComponentSpec{
ComponentName: componentName,
Application: applicationName,
},
},
wantDeployment: appsv1.Deployment{
TypeMeta: v1.TypeMeta{
Kind: "Deployment",
APIVersion: "apps/v1",
},
ObjectMeta: v1.ObjectMeta{
Name: componentName,
Namespace: namespace,
Labels: k8slabels,
},
Spec: appsv1.DeploymentSpec{
Replicas: &replicas,
Selector: &v1.LabelSelector{
MatchLabels: matchLabels,
},
Template: corev1.PodTemplateSpec{
ObjectMeta: v1.ObjectMeta{
Labels: matchLabels,
},
Spec: corev1.PodSpec{
Containers: []corev1.Container{
{
Name: "container-image",
ImagePullPolicy: corev1.PullAlways,
},
},
},
},
},
},
},
{
name: "Component, optional fields set",
component: appstudiov1alpha1.Component{
ObjectMeta: v1.ObjectMeta{
Name: componentName,
Namespace: namespace,
},
Spec: appstudiov1alpha1.ComponentSpec{
ComponentName: componentName,
Application: applicationName,
Replicas: 3,
TargetPort: 5000,
Build: appstudiov1alpha1.Build{
ContainerImage: "quay.io/test/test-image:latest",
},
Env: []corev1.EnvVar{
{
Name: "test",
Value: "value",
},
},
Resources: corev1.ResourceRequirements{
Limits: corev1.ResourceList{
corev1.ResourceCPU: resource.MustParse("2M"),
corev1.ResourceMemory: resource.MustParse("1Gi"),
},
Requests: corev1.ResourceList{
corev1.ResourceCPU: resource.MustParse("1M"),
corev1.ResourceMemory: resource.MustParse("256Mi"),
},
},
},
},
wantDeployment: appsv1.Deployment{
TypeMeta: v1.TypeMeta{
Kind: "Deployment",
APIVersion: "apps/v1",
},
ObjectMeta: v1.ObjectMeta{
Name: componentName,
Namespace: namespace,
Labels: k8slabels,
},
Spec: appsv1.DeploymentSpec{
Replicas: &otherReplicas,
Selector: &v1.LabelSelector{
MatchLabels: matchLabels,
},
Template: corev1.PodTemplateSpec{
ObjectMeta: v1.ObjectMeta{
Labels: matchLabels,
},
Spec: corev1.PodSpec{
Containers: []corev1.Container{
{
Name: "container-image",
Image: "quay.io/test/test-image:latest",
ImagePullPolicy: corev1.PullAlways,
Env: []corev1.EnvVar{
{
Name: "test",
Value: "value",
},
},
Ports: []corev1.ContainerPort{
{
ContainerPort: int32(5000),
},
},
ReadinessProbe: &corev1.Probe{
InitialDelaySeconds: 10,
PeriodSeconds: 10,
Handler: corev1.Handler{
TCPSocket: &corev1.TCPSocketAction{
Port: intstr.FromInt(5000),
},
},
},
LivenessProbe: &corev1.Probe{
InitialDelaySeconds: 10,
PeriodSeconds: 10,
Handler: corev1.Handler{
HTTPGet: &corev1.HTTPGetAction{
Port: intstr.FromInt(5000),
Path: "/",
},
},
},
Resources: corev1.ResourceRequirements{
Limits: corev1.ResourceList{
corev1.ResourceCPU: resource.MustParse("2M"),
corev1.ResourceMemory: resource.MustParse("1Gi"),
},
Requests: corev1.ResourceList{
corev1.ResourceCPU: resource.MustParse("1M"),
corev1.ResourceMemory: resource.MustParse("256Mi"),
},
},
},
},
},
},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
generatedDeployment := generateDeployment(tt.component)
if !reflect.DeepEqual(*generatedDeployment, tt.wantDeployment) {
t.Errorf("TestGenerateDeployment() error: expected %v got %v", tt.wantDeployment, generatedDeployment)
}
})
}
}
func TestGenerateService(t *testing.T) {
applicationName := "test-application"
componentName := "test-component"
namespace := "test-namespace"
k8slabels := map[string]string{
"app.kubernetes.io/name": componentName,
"app.kubernetes.io/instance": componentName,
"app.kubernetes.io/part-of": applicationName,
"app.kubernetes.io/managed-by": "kustomize",
"app.kubernetes.io/created-by": "application-service",
}
matchLabels := map[string]string{
"app.kubernetes.io/instance": componentName,
}
tests := []struct {
name string
component appstudiov1alpha1.Component
wantService corev1.Service
}{
{
name: "Simple component object",
component: appstudiov1alpha1.Component{
ObjectMeta: v1.ObjectMeta{
Name: componentName,
Namespace: namespace,
},
Spec: appstudiov1alpha1.ComponentSpec{
ComponentName: componentName,
Application: applicationName,
TargetPort: 5000,
},
},
wantService: corev1.Service{
TypeMeta: v1.TypeMeta{
APIVersion: "v1",
Kind: "Service",
},
ObjectMeta: v1.ObjectMeta{
Name: componentName,
Namespace: namespace,
Labels: k8slabels,
},
Spec: corev1.ServiceSpec{
Selector: matchLabels,
Ports: []corev1.ServicePort{
{
Port: int32(5000),
TargetPort: intstr.FromInt(5000),
},
},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
generatedService := generateService(tt.component)
if !reflect.DeepEqual(*generatedService, tt.wantService) {
t.Errorf("TestGenerateService() error: expected %v got %v", tt.wantService, generatedService)
}
})
}
}
func TestGenerateRoute(t *testing.T) {
applicationName := "test-application"
componentName := "test-component"
namespace := "test-namespace"
k8slabels := map[string]string{
"app.kubernetes.io/name": componentName,
"app.kubernetes.io/instance": componentName,
"app.kubernetes.io/part-of": applicationName,
"app.kubernetes.io/managed-by": "kustomize",
"app.kubernetes.io/created-by": "application-service",
}
weight := int32(100)
tests := []struct {
name string
component appstudiov1alpha1.Component
wantRoute routev1.Route
}{
{
name: "Simple component object",
component: appstudiov1alpha1.Component{
ObjectMeta: v1.ObjectMeta{
Name: componentName,
Namespace: namespace,
},
Spec: appstudiov1alpha1.ComponentSpec{
ComponentName: componentName,
Application: applicationName,
TargetPort: 5000,
},
},
wantRoute: routev1.Route{
TypeMeta: v1.TypeMeta{
Kind: "Route",
APIVersion: "route.openshift.io/v1",
},
ObjectMeta: v1.ObjectMeta{
Name: componentName,
Namespace: namespace,
Labels: k8slabels,
},
Spec: routev1.RouteSpec{
Port: &routev1.RoutePort{
TargetPort: intstr.FromInt(5000),
},
TLS: &routev1.TLSConfig{
InsecureEdgeTerminationPolicy: routev1.InsecureEdgeTerminationPolicyRedirect,
Termination: routev1.TLSTerminationEdge,
},
To: routev1.RouteTargetReference{
Kind: "Service",
Name: componentName,
Weight: &weight,
},
},
},
},
{
name: "Component object with route/hostname set",
component: appstudiov1alpha1.Component{
ObjectMeta: v1.ObjectMeta{
Name: componentName,
Namespace: namespace,
},
Spec: appstudiov1alpha1.ComponentSpec{
ComponentName: componentName,
Application: applicationName,
TargetPort: 5000,
Route: "example.com",
},
},
wantRoute: routev1.Route{
TypeMeta: v1.TypeMeta{
Kind: "Route",
APIVersion: "route.openshift.io/v1",
},
ObjectMeta: v1.ObjectMeta{
Name: componentName,
Namespace: namespace,
Labels: k8slabels,
},
Spec: routev1.RouteSpec{
Host: "example.com",
Port: &routev1.RoutePort{
TargetPort: intstr.FromInt(5000),
},
TLS: &routev1.TLSConfig{
InsecureEdgeTerminationPolicy: routev1.InsecureEdgeTerminationPolicyRedirect,
Termination: routev1.TLSTerminationEdge,
},
To: routev1.RouteTargetReference{
Kind: "Service",
Name: componentName,
Weight: &weight,
},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
generatedRoute := generateRoute(tt.component)
if !reflect.DeepEqual(*generatedRoute, tt.wantRoute) {
t.Errorf("TestGenerateRoute() error: expected %v got %v", tt.wantRoute, generatedRoute)
}
})
}
}
app-configuration.d.ts | export interface IAppConfiguration {
  dhis2_url: string;
  dhis2_username: string;
  dhis2_password: string;
}
|
BooksList.js | import { useDispatch, useSelector } from 'react-redux';
import PropTypes from 'prop-types';
import { Box } from '@material-ui/core';
import Book from '../components/Book';
import { changeFilter, removeBook } from '../actions';
import CategoryFilter from '../components/CategoryFilter';
const BooksList = () => {
const books = useSelector((state) => state.bookReducer);
const categories = useSelector((state) => state.filterReducer);
const dispatch = useDispatch();
const handleRemoveBook = (book) => {
dispatch(removeBook(book));
};
const handleFilterChange = (category) => dispatch(changeFilter(category));
return (
<div>
<CategoryFilter filterHandler={handleFilterChange} />
<Box>
{
          books.filter((book) => (categories === 'All'
            ? true
            : book.category === categories)).map((book, index) => (
              <Box key={book.id}>
                <Book
                  book={book} id={index + 1} removeBookHandler={handleRemoveBook}
/>
</Box>
))
}
</Box>
</div>
);
};
BooksList.propTypes = {
books: PropTypes.array,
};
BooksList.defaultProps = {
  books: [],
};
export default BooksList;
registrar.go | /* Copyright (c) 2016 Chris Smith
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package unifiedbeat
import (
"encoding/json"
"io/ioutil"
"os"
"path/filepath"
"sync"
"github.com/elastic/beats/libbeat/logp"
)
// Registrar should only have one entry, which
// is the offset into the unified2 file
// currently being tailed (if any)
type Registrar struct {
registryFile string // path to the registry file
State FileState // unified2 file name and offset
sync.Mutex // lock and unlock during writes
}
// remove the ",omitempty"s so something is written
// to the registry file instead of just "{}"
type FileState struct {
Offset int64 `json:"offset"`
Source string `json:"source"`
}
func NewRegistrar(registryFile string) (*Registrar, error) {
	r := &Registrar{
		registryFile: registryFile,
	}
	// Ensure we have access to write the registry file
	// by creating, closing, and removing a test file.
	// Of course, access could still fail in later
	// calls to LoadState or WriteRegistry.
	// There is no perfect solution as files and
	// permissions are just a mess, but we tried.
	testfile := r.registryFile + ".access.test"
	file, err := os.Create(testfile)
	if err != nil {
		logp.Info("NewRegistrar: test 'create file' access was denied to path for registry file: '%v'\n", r.registryFile)
		return nil, err
	}
	err = file.Close()
	if err != nil {
		// really? we lost access after Create, really?
		logp.Info("NewRegistrar: test 'close file' access was denied to path for registry file: '%v'\n", r.registryFile)
		return nil, err
	}
	err = os.Remove(testfile)
	if err != nil {
		// really? we lost access after Create and Close, really?
		logp.Info("NewRegistrar: test 'remove file' access was denied to path for registry file: '%v'\n", r.registryFile)
		return nil, err
	}
	// set an absolute path to the registryFile:
	absPath, err := filepath.Abs(r.registryFile)
	if err != nil {
		logp.Info("NewRegistrar: failed to set the absolute path for registry file: '%s'\n", r.registryFile)
		return nil, err
	}
	r.registryFile = absPath
	return r, nil
}
func (r *Registrar) LoadState() {
	if existing, e := os.Open(r.registryFile); e == nil {
		defer existing.Close()
		decoder := json.NewDecoder(existing)
		decoder.Decode(&r.State)
	}
}
func (r *Registrar) WriteRegistry() error {
	r.Lock()
	defer r.Unlock()
	// can't truncate a file that does not exist
	// (note: os.Stat returns a nil error when the file exists, so the
	// existence check must be err == nil, not os.IsExist(err))
	_, err := os.Stat(r.registryFile)
	if err == nil {
		err := os.Truncate(r.registryFile, 0)
		if err != nil {
			logp.Info("WriteRegistry: os.Truncate: err=%v\n", err)
			return err
		}
	}
	// if "json.Marshal" or "ioutil.WriteFile" fail then most likely
	// unifiedbeat does not have access to the registry file
	jsonState, err := json.Marshal(r.State)
	if err != nil {
		logp.Info("WriteRegistry: json.Marshal: err=%v\n", err)
		return err
	}
	// https://golang.org/pkg/io/ioutil/#WriteFile
	// If the file does not exist, WriteFile creates it with
	// permissions 0644; otherwise it is truncated.
	err = ioutil.WriteFile(r.registryFile, jsonState, 0644)
	if err != nil {
		logp.Info("WriteRegistry: ioutil.WriteFile: err=%v\n", err)
		return err
	}
	return nil
}
plot1.py | #!/bin/python3
'''
This file plots a graph with the following setting.
1. We first select an image x_0
2. We then add some perturbation to the image to get x_1 (its type shall be
configurable in the future; currently it is either random or loaded from a
file)
3. Next, we plot f(x) for all x on the segment from x_0 to x_1
4. Finally, we optionally save the perturbation for future work
Example:
python plot1.py --train '' --network lfc --ranking True --fidelity True --std_modeling True --std_loss '' --margin 0.025 --batch_size 128 --batch_size2 32 --image_size 384 --max_epochs 3 --lr 1e-4 --decay_interval 3 --decay_ratio 0.1 --fixvar --max_epochs2 12 --batch_size=16 --batch_size2=16 --ckpt_path=checkpoints_many/lfc -x /data_partition/yang/fyp/adv_1/IQA_database_syn/databaserelease2/jp2k/img4.bmp --pertubation_length 0.01
python plot1.py --train '' --network lfc --ranking True --fidelity True --std_modeling True --std_loss '' --margin 0.025 --batch_size 128 --batch_size2 32 --image_size 384 --max_epochs 3 --lr 1e-4 --decay_interval 3 --decay_ratio 0.1 --fixvar --max_epochs2 12 --batch_size=16 --batch_size2=16 --ckpt_path=checkpoints_many/lfc_lip -x /data_partition/yang/fyp/adv_1/IQA_database_syn/databaserelease2/jp2k/img4.bmp --pertubation_length 0.01
python plot1.py --train '' --network lfc --ranking True --fidelity True --std_modeling True --std_loss '' --margin 0.025 --batch_size 128 --batch_size2 32 --image_size 384 --max_epochs 3 --lr 1e-4 --decay_interval 3 --decay_ratio 0.1 --fixvar --max_epochs2 12 --batch_size=16 --batch_size2=16 --ckpt_path=checkpoints_many/lfc_nom -x /data_partition/yang/fyp/adv_1/IQA_database_syn/databaserelease2/jp2k/img4.bmp --pertubation_length 0.01 --force_normalization
python plot1.py --train '' --network lfc_relu --ranking True --fidelity True --std_modeling True --std_loss '' --margin 0.025 --batch_size 128 --batch_size2 32 --image_size 384 --max_epochs 3 --lr 1e-4 --decay_interval 3 --decay_ratio 0.1 --fixvar --max_epochs2 12 --batch_size=16 --batch_size2=16 --ckpt_path=checkpoints_many/lfc_relu_nom -x /data_partition/yang/fyp/adv_1/IQA_database_syn/databaserelease2/jp2k/img4.bmp --pertubation_length 0.01 --force_normalization
python plot1.py --train '' --network lfc_relu --ranking True --fidelity True --std_modeling True --std_loss '' --margin 0.025 --batch_size 128 --batch_size2 32 --image_size 384 --max_epochs 3 --lr 1e-4 --decay_interval 3 --decay_ratio 0.1 --fixvar --max_epochs2 12 --batch_size=16 --batch_size2=16 --ckpt_path=checkpoints_many/lfc_relu_nom_lip -x /data_partition/yang/fyp/adv_1/IQA_database_syn/databaserelease2/jp2k/img4.bmp --pertubation_length 0.01 --force_normalization
'''
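# Illustrative sketch (not part of the original script): step 3 above is a
# one-dimensional sweep of the score along the segment from x_0 to x_1,
# i.e. f(x_0 + t * (x_1 - x_0)) for t in [0, 1]. `score_fn` is a hypothetical
# stand-in for the predict_single_image call used further below.
def plot_along_segment_sketch(score_fn, x0, x1, n=100):
    import numpy as np
    import matplotlib.pyplot as plt
    ts = np.linspace(0, 1, n)
    ys = [score_fn(x0 + t * (x1 - x0)) for t in ts]
    plt.plot(ts, ys)
    plt.show()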
import argparse
import TrainModel
import scipy.io as sio
import os
import torch
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
from torchvision import transforms
def parse_config():
parser = argparse.ArgumentParser()
parser.add_argument('-x', '--img', type=str, help='the base image')
    parser.add_argument('-p', '--pertubation', type=str, default='',
                        help='the perturbation of the image; randomly generated if not provided')
    parser.add_argument('--pertubation_length', type=float, default=0.01,
                        help='the length of the perturbation, if random generation is necessary')
    parser.add_argument('-s', '--save_pertubation', type=str, default='',
                        help='path to save the perturbation to (not saved if empty)')
parser.add_argument("--train", type=bool, default=True)
parser.add_argument('--get_scores', type=bool, default=False)
parser.add_argument("--use_cuda", type=bool, default=True)
# parser.add_argument("--device", type=str, default="cuda")
parser.add_argument("--resume", action='store_true')
parser.add_argument("--seed", type=int, default=19901116)
parser.add_argument("--backbone", type=str, default='resnet34')
parser.add_argument("--fc", type=bool, default=True)
parser.add_argument('--scnn_root', type=str, default='saved_weights/scnn.pkl')
parser.add_argument("--network", type=str, default="basecnn",
help='basecnn or dbcnn or lfc')
parser.add_argument("--representation", type=str, default="BCNN")
parser.add_argument("--ranking", type=bool, default=True,
help='True for learning-to-rank False for regular regression')
parser.add_argument("--fidelity", type=bool, default=True,
help='True for fidelity loss False for regular ranknet with CE loss')
parser.add_argument("--std_modeling", type=bool,
default=True) # True for modeling std False for not
parser.add_argument("--std_loss", type=bool, default=True)
parser.add_argument("--fixvar", action='store_true') #+
parser.add_argument("--force_normalization", action='store_true')
parser.add_argument("--lipschitz", action='store_true')
parser.add_argument("--margin", type=float, default=0.025)
parser.add_argument("--split", type=int, default=1)
parser.add_argument("--trainset", type=str, default="./IQA_database/")
parser.add_argument("--live_set", type=str, default="./IQA_database/databaserelease2/")
parser.add_argument("--csiq_set", type=str, default="./IQA_database/CSIQ/")
parser.add_argument("--tid2013_set", type=str, default="./IQA_database/TID2013/")
parser.add_argument("--bid_set", type=str, default="./IQA_database/BID/")
#parser.add_argument("--cid_set", type=str, default="./IQA_database/CID2013_camera/")
parser.add_argument("--clive_set", type=str, default="./IQA_database/ChallengeDB_release/")
parser.add_argument("--koniq10k_set", type=str, default="./IQA_database/koniq-10k/")
parser.add_argument("--kadid10k_set", type=str, default="./IQA_database/kadid10k/")
parser.add_argument("--eval_live", type=bool, default=True)
parser.add_argument("--eval_csiq", type=bool, default=True)
parser.add_argument("--eval_tid2013", type=bool, default=False)
parser.add_argument("--eval_kadid10k", type=bool, default=True)
parser.add_argument("--eval_bid", type=bool, default=True)
parser.add_argument("--eval_clive", type=bool, default=True)
parser.add_argument("--eval_koniq10k", type=bool, default=True)
parser.add_argument("--split_modeling", type=bool, default=False)
parser.add_argument('--ckpt_path', default='./checkpoint', type=str,
metavar='PATH', help='path to checkpoints')
parser.add_argument('--ckpt', default=None, type=str, help='name of the checkpoint to load')
parser.add_argument("--train_txt", type=str, default='train.txt') # train.txt | train_synthetic.txt | train_authentic.txt | train_sub2.txt | train_score.txt
parser.add_argument("--batch_size", type=int, default=128)
parser.add_argument("--batch_size2", type=int, default=32)
parser.add_argument("--image_size", type=int, default=384, help='None means random resolution')
parser.add_argument("--max_epochs", type=int, default=3)
parser.add_argument("--max_epochs2", type=int, default=12)
parser.add_argument("--lr", type=float, default=1e-4)
parser.add_argument("--decay_interval", type=int, default=3)
parser.add_argument("--decay_ratio", type=float, default=0.1)
parser.add_argument("--epochs_per_eval", type=int, default=1)
parser.add_argument("--epochs_per_save", type=int, default=1)
parser.add_argument("--verbose", action='store_true')
config = parser.parse_args()
config.to_test = []
return config
def main(config):
t = TrainModel.Trainer(config)
    # checking compatibility
if config.fixvar and not config.network.startswith('lfc'):
raise NotImplementedError()
if str(config.backbone).startswith('lfc') and not config.std_modeling:
raise NotImplementedError()
model = t.model
pil_img = Image.open(config.img)
# pil_img = pil_img.reshape((1,) + tuple(pil_img.shape))
img = t.test_transform(pil_img).to(t.device)
if config.pertubation:
with open(config.pertubation, 'rb') as f:
pertubation = torch.load(f)
else:
pertubation = torch.rand(img.shape) * config.pertubation_length
pertubation = pertubation.to(t.device)
img = img.unsqueeze(0)
print(img.shape)
if config.save_pertubation:
with open(config.save_pertubation, 'wb') as f:
torch.save(pertubation, f)
should_normalize = not config.network.startswith('lfc') or config.force_normalization
if should_normalize:
normalization_transform = \
transforms.Normalize(mean=(0.485, 0.456, 0.406),
std=(0.229, 0.224, 0.225))
pertubation = normalization_transform(pertubation)
x = list(np.linspace(0, 1, 100))
y = [t.predict_single_image(img + p * pertubation).detach().cpu().numpy() for p in x]
plt.plot(x, y)
plt.show()
if __name__ == "__main__":
config = parse_config()
main(config)
| parse_config |
main.py | import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from scipy.stats import pearsonr
from bin_data import bin_data
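# The local bin_data module is not shown in this dump. A hypothetical stand-in,
# consistent with how bin_data is called below (2-D spatial binning of (x, y),
# optionally weighted by var_to_bin, so that map / occupancy is a per-bin mean):
def bin_data_sketch(coords, bin_size=4, limits=None, var_to_bin=None):
    x, y = np.asarray(coords[0]), np.asarray(coords[1])
    (x_lo, x_hi), (y_lo, y_hi) = limits
    bins = [np.arange(x_lo, x_hi + bin_size, bin_size),
            np.arange(y_lo, y_hi + bin_size, bin_size)]
    weights = None if var_to_bin is None else np.asarray(var_to_bin)
    hist, _, _ = np.histogram2d(x, y, bins=bins, weights=weights)
    return hist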
# import pixel data
right_z_pixel_change = np.load("right_z_pixel_change.npy")
left_z_pixel_change = np.load("left_z_pixel_change.npy")
front_z_pixel_change = np.load("front_z_pixel_change.npy")
# average pixel change across front, left & right fovs
pixel_change = np.vstack((left_z_pixel_change, front_z_pixel_change, right_z_pixel_change)).mean(axis=0)
# import rate change data
dat = pd.read_pickle("df_population_vector_change.p")
# Clean the data (sequential data points are 1cm apart along trajectory)
dat = dat[dat.environment == 'D']
df = dat.filter(['animal', 'x_coord', 'y_coord', 'direction', 'timestamp'], axis=1)
dat = dat[~df.isnull().any(axis=1)]
good_pixel_ids = np.array(np.diff(dat.x_coord)**2 + np.diff(dat.y_coord)**2 < 1.01, dtype=bool)
pixel_change = pixel_change[good_pixel_ids]
good_rate_ids = np.append(False, good_pixel_ids)
turning_rate = np.abs(np.diff(dat['direction'])) % 360
turning_rate = turning_rate[good_pixel_ids]
dat = dat[good_rate_ids]
# z-score data
dat['rate change\n(euclidean)'] = (dat['rate change\n(euclidean)'] - np.mean(dat['rate change\n(euclidean)']))/np.std(dat['rate change\n(euclidean)'])
pixel_change = (pixel_change - np.mean(pixel_change))/np.std(pixel_change)
# Plot Occupancy
occupancy = bin_data([dat.x_coord, dat.y_coord], bin_size = 4, limits = [(0, 350), (0, 250)])
plt.imshow(occupancy.T, origin='upper', cmap=plt.get_cmap('jet'))
plt.title('Occupancy')
plt.show()
# Plot pixel change across space
pixel_change_map = bin_data([dat.x_coord, dat.y_coord], bin_size = 4, limits = [(0, 350), (0, 250)], var_to_bin = pixel_change) / occupancy
plt.imshow(pixel_change_map.T, origin='upper', cmap=plt.get_cmap('jet'))
plt.axis('off')
plt.clim([-1.5,1.5])
plt.title('Pixel Change Map')
plt.show()
# Plot firing rate change across space
rate_change_map = bin_data([dat.x_coord, dat.y_coord], bin_size = 4, limits = [(0, 350), (0, 250)], var_to_bin = dat['rate change\n(euclidean)']) / occupancy
plt.imshow(rate_change_map.T, origin='upper', cmap=plt.get_cmap('jet'))
plt.axis('off')
plt.clim([-1.5,1.5])
plt.title('Rate Change Map')
plt.show()
corr, _ = pearsonr(pixel_change, dat['rate change\n(euclidean)'])
print('Rate Change vs Pixel Change Pearson r = %.3f' % corr)
# Filter bits of trajectory by head direction
north_ids = (np.degrees(dat.direction) % 360 >= 315) | (np.degrees(dat.direction) % 360 < 45)
north_occupancy = bin_data([dat.x_coord[north_ids], dat.y_coord[north_ids]], bin_size = 4, limits = [(0, 350), (0, 250)])
south_ids = (np.degrees(dat.direction) % 360 >= 135) & (np.degrees(dat.direction) % 360 < 225)
south_occupancy = bin_data([dat.x_coord[south_ids], dat.y_coord[south_ids]], bin_size = 4, limits = [(0, 350), (0, 250)])
east_ids = (np.degrees(dat.direction) % 360 >= 45) & (np.degrees(dat.direction) % 360 < 135)
east_occupancy = bin_data([dat.x_coord[east_ids], dat.y_coord[east_ids]], bin_size = 4, limits = [(0, 350), (0, 250)])
west_ids = (np.degrees(dat.direction) % 360 >= 225) & (np.degrees(dat.direction) % 360 < 315)
west_occupancy = bin_data([dat.x_coord[west_ids], dat.y_coord[west_ids]], bin_size = 4, limits = [(0, 350), (0, 250)])
cmap = plt.get_cmap('jet')
cmap.set_bad('w',1.)
# Calculate pixel and rate change maps by heading direction
north_pix_map = bin_data([dat.x_coord[north_ids], dat.y_coord[north_ids]], bin_size = 4, limits = [(0, 350), (0, 250)], var_to_bin = pixel_change[north_ids]) / north_occupancy
south_pix_map = bin_data([dat.x_coord[south_ids], dat.y_coord[south_ids]], bin_size = 4, limits = [(0, 350), (0, 250)], var_to_bin = pixel_change[south_ids]) / south_occupancy
east_pix_map = bin_data([dat.x_coord[east_ids], dat.y_coord[east_ids]], bin_size = 4, limits = [(0, 350), (0, 250)], var_to_bin = pixel_change[east_ids]) / east_occupancy
west_pix_map = bin_data([dat.x_coord[west_ids], dat.y_coord[west_ids]], bin_size = 4, limits = [(0, 350), (0, 250)], var_to_bin = pixel_change[west_ids]) / west_occupancy
north_rat_map = bin_data([dat.x_coord[north_ids], dat.y_coord[north_ids]], bin_size = 4, limits = [(0, 350), (0, 250)], var_to_bin = dat['rate change\n(euclidean)'][north_ids]) / north_occupancy
south_rat_map = bin_data([dat.x_coord[south_ids], dat.y_coord[south_ids]], bin_size = 4, limits = [(0, 350), (0, 250)], var_to_bin = dat['rate change\n(euclidean)'][south_ids]) / south_occupancy
east_rat_map = bin_data([dat.x_coord[east_ids], dat.y_coord[east_ids]], bin_size = 4, limits = [(0, 350), (0, 250)], var_to_bin = dat['rate change\n(euclidean)'][east_ids]) / east_occupancy
west_rat_map = bin_data([dat.x_coord[west_ids], dat.y_coord[west_ids]], bin_size = 4, limits = [(0, 350), (0, 250)], var_to_bin = dat['rate change\n(euclidean)'][west_ids]) / west_occupancy
c_lo = -1.5
c_hi = 1.5
# Plot change maps filtered by direction
plt.subplot(3,3,2)
plt.title('Unfolded Pixel Change Map')
plt.imshow(west_pix_map.T, origin='upper', cmap=cmap)
plt.clim([c_lo,c_hi])
plt.axis('off')
plt.subplot(3,3,4)
plt.imshow(south_pix_map.T, origin='upper', cmap=cmap)
plt.clim([c_lo,c_hi])
plt.axis('off')
plt.subplot(3,3,5)
plt.imshow(pixel_change_map.T, origin='upper', cmap=cmap)
plt.clim([c_lo,c_hi])
plt.axis('off')
plt.subplot(3,3,6)
plt.imshow(north_pix_map.T, origin='upper', cmap=cmap)
plt.clim([c_lo,c_hi])
plt.axis('off')
plt.subplot(3,3,8)
plt.imshow(east_pix_map.T, origin='upper', cmap=cmap)
plt.clim([c_lo,c_hi])
plt.axis('off')
plt.show()
plt.subplot(3,3,2)
plt.title('Unfolded Rate Change Map')
plt.imshow(west_rat_map.T, origin='upper', cmap=cmap)
plt.clim([c_lo,c_hi])
plt.axis('off')
plt.subplot(3,3,4)
plt.imshow(south_rat_map.T, origin='upper', cmap=cmap)
plt.clim([c_lo,c_hi])
plt.axis('off')
plt.subplot(3,3,5)
plt.imshow(rate_change_map.T, origin='upper', cmap=cmap)
plt.clim([c_lo,c_hi])
plt.axis('off')
plt.subplot(3,3,6)
plt.imshow(north_rat_map.T, origin='upper', cmap=cmap)
plt.clim([c_lo,c_hi])
plt.axis('off')
plt.subplot(3,3,8)
plt.imshow(east_rat_map.T, origin='upper', cmap=cmap)
plt.clim([c_lo,c_hi])
plt.axis('off')
plt.show()
build.rs | fn main() {
	tauri_build::build()
}
|
123.js | (window["webpackJsonp"] = window["webpackJsonp"] || []).push([[123],{
/***/ "./node_modules/css-loader/dist/cjs.js?!./node_modules/postcss-loader/src/index.js?!./node_modules/quill/dist/quill.bubble.css":
/*!**********************************************************************************************************************************************!*\
!*** ./node_modules/css-loader/dist/cjs.js??ref--7-1!./node_modules/postcss-loader/src??ref--7-2!./node_modules/quill/dist/quill.bubble.css ***!
\**********************************************************************************************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
// Imports
var ___CSS_LOADER_API_IMPORT___ = __webpack_require__(/*! ../../css-loader/dist/runtime/api.js */ "./node_modules/css-loader/dist/runtime/api.js");
exports = ___CSS_LOADER_API_IMPORT___(false);
// Module
exports.push([module.i, "/*!\n * Quill Editor v1.3.7\n * https://quilljs.com/\n * Copyright (c) 2014, Jason Chen\n * Copyright (c) 2013, salesforce.com\n */\n\n.ql-container {\n box-sizing: border-box;\n font-family: Helvetica, Arial, sans-serif;\n font-size: 13px;\n height: 100%;\n position: relative;\n}\n\n[dir] .ql-container {\n margin: 0px;\n}\n\n.ql-container.ql-disabled .ql-tooltip {\n visibility: hidden;\n}\n\n.ql-container.ql-disabled .ql-editor ul[data-checked] > li::before {\n pointer-events: none;\n}\n\n.ql-clipboard {\n height: 1px;\n overflow-y: hidden;\n position: absolute;\n top: 50%;\n}\n\n[dir=ltr] .ql-clipboard {\n left: -100000px;\n}\n\n[dir=rtl] .ql-clipboard {\n right: -100000px;\n}\n\n[dir] .ql-clipboard p {\n margin: 0;\n padding: 0;\n}\n\n.ql-editor {\n box-sizing: border-box;\n line-height: 1.42;\n height: 100%;\n outline: none;\n overflow-y: auto;\n -o-tab-size: 4;\n tab-size: 4;\n -moz-tab-size: 4;\n white-space: pre-wrap;\n word-wrap: break-word;\n}\n\n[dir] .ql-editor {\n padding: 12px 15px;\n}\n\n[dir=ltr] .ql-editor {\n text-align: left;\n}\n\n[dir=rtl] .ql-editor {\n text-align: right;\n}\n\n[dir] .ql-editor > * {\n cursor: text;\n}\n\n.ql-editor p,\n.ql-editor ol,\n.ql-editor ul,\n.ql-editor pre,\n.ql-editor blockquote,\n.ql-editor h1,\n.ql-editor h2,\n.ql-editor h3,\n.ql-editor h4,\n.ql-editor h5,\n.ql-editor h6 {\n counter-reset: list-1 list-2 list-3 list-4 list-5 list-6 list-7 list-8 list-9;\n}\n\n[dir] .ql-editor p, [dir] .ql-editor ol, [dir] .ql-editor ul, [dir] .ql-editor pre, [dir] .ql-editor blockquote, [dir] .ql-editor h1, [dir] .ql-editor h2, [dir] .ql-editor h3, [dir] .ql-editor h4, [dir] .ql-editor h5, [dir] .ql-editor h6 {\n margin: 0;\n padding: 0;\n}\n\n[dir=ltr] .ql-editor ol, [dir=ltr] .ql-editor ul {\n padding-left: 1.5em;\n}\n\n[dir=rtl] .ql-editor ol, [dir=rtl] .ql-editor ul {\n padding-right: 1.5em;\n}\n\n.ql-editor ol > li,\n.ql-editor ul > li {\n list-style-type: none;\n}\n\n.ql-editor ul > li::before {\n content: '\\2022';\n}\n\n.ql-editor ul[data-checked=true],\n.ql-editor ul[data-checked=false] {\n pointer-events: none;\n}\n\n.ql-editor ul[data-checked=true] > li *,\n.ql-editor ul[data-checked=false] > li * {\n pointer-events: all;\n}\n\n.ql-editor ul[data-checked=true] > li::before,\n.ql-editor ul[data-checked=false] > li::before {\n color: #777;\n pointer-events: all;\n}\n\n[dir] .ql-editor ul[data-checked=true] > li::before, [dir] .ql-editor ul[data-checked=false] > li::before {\n cursor: pointer;\n}\n\n.ql-editor ul[data-checked=true] > li::before {\n content: '\\2611';\n}\n\n.ql-editor ul[data-checked=false] > li::before {\n content: '\\2610';\n}\n\n.ql-editor li::before {\n display: inline-block;\n white-space: nowrap;\n width: 1.2em;\n}\n\n[dir=ltr] .ql-editor li:not(.ql-direction-rtl)::before {\n margin-left: -1.5em;\n margin-right: 0.3em;\n text-align: right;\n}\n\n[dir=rtl] .ql-editor li:not(.ql-direction-rtl)::before {\n margin-right: -1.5em;\n margin-left: 0.3em;\n text-align: left;\n}\n\n[dir=ltr] .ql-editor li.ql-direction-rtl::before {\n margin-left: 0.3em;\n margin-right: -1.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-direction-rtl::before {\n margin-right: 0.3em;\n margin-left: -1.5em;\n}\n\n[dir=ltr] .ql-editor ol li:not(.ql-direction-rtl), [dir=ltr] .ql-editor ul li:not(.ql-direction-rtl) {\n padding-left: 1.5em;\n}\n\n[dir=rtl] .ql-editor ol li:not(.ql-direction-rtl), [dir=rtl] .ql-editor ul li:not(.ql-direction-rtl) {\n padding-right: 1.5em;\n}\n\n[dir=ltr] .ql-editor ol li.ql-direction-rtl, [dir=ltr] .ql-editor ul 
li.ql-direction-rtl {\n padding-right: 1.5em;\n}\n\n[dir=rtl] .ql-editor ol li.ql-direction-rtl, [dir=rtl] .ql-editor ul li.ql-direction-rtl {\n padding-left: 1.5em;\n}\n\n.ql-editor ol li {\n counter-reset: list-1 list-2 list-3 list-4 list-5 list-6 list-7 list-8 list-9;\n counter-increment: list-0;\n}\n\n.ql-editor ol li:before {\n content: counter(list-0, decimal) '. ';\n}\n\n.ql-editor ol li.ql-indent-1 {\n counter-increment: list-1;\n}\n\n.ql-editor ol li.ql-indent-1:before {\n content: counter(list-1, lower-alpha) '. ';\n}\n\n.ql-editor ol li.ql-indent-1 {\n counter-reset: list-2 list-3 list-4 list-5 list-6 list-7 list-8 list-9;\n}\n\n.ql-editor ol li.ql-indent-2 {\n counter-increment: list-2;\n}\n\n.ql-editor ol li.ql-indent-2:before {\n content: counter(list-2, lower-roman) '. ';\n}\n\n.ql-editor ol li.ql-indent-2 {\n counter-reset: list-3 list-4 list-5 list-6 list-7 list-8 list-9;\n}\n\n.ql-editor ol li.ql-indent-3 {\n counter-increment: list-3;\n}\n\n.ql-editor ol li.ql-indent-3:before {\n content: counter(list-3, decimal) '. ';\n}\n\n.ql-editor ol li.ql-indent-3 {\n counter-reset: list-4 list-5 list-6 list-7 list-8 list-9;\n}\n\n.ql-editor ol li.ql-indent-4 {\n counter-increment: list-4;\n}\n\n.ql-editor ol li.ql-indent-4:before {\n content: counter(list-4, lower-alpha) '. ';\n}\n\n.ql-editor ol li.ql-indent-4 {\n counter-reset: list-5 list-6 list-7 list-8 list-9;\n}\n\n.ql-editor ol li.ql-indent-5 {\n counter-increment: list-5;\n}\n\n.ql-editor ol li.ql-indent-5:before {\n content: counter(list-5, lower-roman) '. ';\n}\n\n.ql-editor ol li.ql-indent-5 {\n counter-reset: list-6 list-7 list-8 list-9;\n}\n\n.ql-editor ol li.ql-indent-6 {\n counter-increment: list-6;\n}\n\n.ql-editor ol li.ql-indent-6:before {\n content: counter(list-6, decimal) '. ';\n}\n\n.ql-editor ol li.ql-indent-6 {\n counter-reset: list-7 list-8 list-9;\n}\n\n.ql-editor ol li.ql-indent-7 {\n counter-increment: list-7;\n}\n\n.ql-editor ol li.ql-indent-7:before {\n content: counter(list-7, lower-alpha) '. ';\n}\n\n.ql-editor ol li.ql-indent-7 {\n counter-reset: list-8 list-9;\n}\n\n.ql-editor ol li.ql-indent-8 {\n counter-increment: list-8;\n}\n\n.ql-editor ol li.ql-indent-8:before {\n content: counter(list-8, lower-roman) '. ';\n}\n\n.ql-editor ol li.ql-indent-8 {\n counter-reset: list-9;\n}\n\n.ql-editor ol li.ql-indent-9 {\n counter-increment: list-9;\n}\n\n.ql-editor ol li.ql-indent-9:before {\n content: counter(list-9, decimal) '. 
';\n}\n\n[dir=ltr] .ql-editor .ql-indent-1:not(.ql-direction-rtl) {\n padding-left: 3em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-1:not(.ql-direction-rtl) {\n padding-right: 3em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-1:not(.ql-direction-rtl) {\n padding-left: 4.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-1:not(.ql-direction-rtl) {\n padding-right: 4.5em;\n}\n\n[dir=ltr] .ql-editor .ql-indent-1.ql-direction-rtl.ql-align-right {\n padding-right: 3em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-1.ql-direction-rtl.ql-align-right {\n padding-left: 3em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-1.ql-direction-rtl.ql-align-right {\n padding-right: 4.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-1.ql-direction-rtl.ql-align-right {\n padding-left: 4.5em;\n}\n\n[dir=ltr] .ql-editor .ql-indent-2:not(.ql-direction-rtl) {\n padding-left: 6em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-2:not(.ql-direction-rtl) {\n padding-right: 6em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-2:not(.ql-direction-rtl) {\n padding-left: 7.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-2:not(.ql-direction-rtl) {\n padding-right: 7.5em;\n}\n\n[dir=ltr] .ql-editor .ql-indent-2.ql-direction-rtl.ql-align-right {\n padding-right: 6em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-2.ql-direction-rtl.ql-align-right {\n padding-left: 6em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-2.ql-direction-rtl.ql-align-right {\n padding-right: 7.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-2.ql-direction-rtl.ql-align-right {\n padding-left: 7.5em;\n}\n\n[dir=ltr] .ql-editor .ql-indent-3:not(.ql-direction-rtl) {\n padding-left: 9em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-3:not(.ql-direction-rtl) {\n padding-right: 9em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-3:not(.ql-direction-rtl) {\n padding-left: 10.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-3:not(.ql-direction-rtl) {\n padding-right: 10.5em;\n}\n\n[dir=ltr] .ql-editor .ql-indent-3.ql-direction-rtl.ql-align-right {\n padding-right: 9em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-3.ql-direction-rtl.ql-align-right {\n padding-left: 9em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-3.ql-direction-rtl.ql-align-right {\n padding-right: 10.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-3.ql-direction-rtl.ql-align-right {\n padding-left: 10.5em;\n}\n\n[dir=ltr] .ql-editor .ql-indent-4:not(.ql-direction-rtl) {\n padding-left: 12em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-4:not(.ql-direction-rtl) {\n padding-right: 12em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-4:not(.ql-direction-rtl) {\n padding-left: 13.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-4:not(.ql-direction-rtl) {\n padding-right: 13.5em;\n}\n\n[dir=ltr] .ql-editor .ql-indent-4.ql-direction-rtl.ql-align-right {\n padding-right: 12em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-4.ql-direction-rtl.ql-align-right {\n padding-left: 12em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-4.ql-direction-rtl.ql-align-right {\n padding-right: 13.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-4.ql-direction-rtl.ql-align-right {\n padding-left: 13.5em;\n}\n\n[dir=ltr] .ql-editor .ql-indent-5:not(.ql-direction-rtl) {\n padding-left: 15em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-5:not(.ql-direction-rtl) {\n padding-right: 15em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-5:not(.ql-direction-rtl) {\n padding-left: 16.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-5:not(.ql-direction-rtl) {\n padding-right: 16.5em;\n}\n\n[dir=ltr] .ql-editor .ql-indent-5.ql-direction-rtl.ql-align-right {\n padding-right: 15em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-5.ql-direction-rtl.ql-align-right {\n padding-left: 
15em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-5.ql-direction-rtl.ql-align-right {\n padding-right: 16.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-5.ql-direction-rtl.ql-align-right {\n padding-left: 16.5em;\n}\n\n[dir=ltr] .ql-editor .ql-indent-6:not(.ql-direction-rtl) {\n padding-left: 18em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-6:not(.ql-direction-rtl) {\n padding-right: 18em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-6:not(.ql-direction-rtl) {\n padding-left: 19.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-6:not(.ql-direction-rtl) {\n padding-right: 19.5em;\n}\n\n[dir=ltr] .ql-editor .ql-indent-6.ql-direction-rtl.ql-align-right {\n padding-right: 18em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-6.ql-direction-rtl.ql-align-right {\n padding-left: 18em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-6.ql-direction-rtl.ql-align-right {\n padding-right: 19.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-6.ql-direction-rtl.ql-align-right {\n padding-left: 19.5em;\n}\n\n[dir=ltr] .ql-editor .ql-indent-7:not(.ql-direction-rtl) {\n padding-left: 21em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-7:not(.ql-direction-rtl) {\n padding-right: 21em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-7:not(.ql-direction-rtl) {\n padding-left: 22.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-7:not(.ql-direction-rtl) {\n padding-right: 22.5em;\n}\n\n[dir=ltr] .ql-editor .ql-indent-7.ql-direction-rtl.ql-align-right {\n padding-right: 21em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-7.ql-direction-rtl.ql-align-right {\n padding-left: 21em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-7.ql-direction-rtl.ql-align-right {\n padding-right: 22.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-7.ql-direction-rtl.ql-align-right {\n padding-left: 22.5em;\n}\n\n[dir=ltr] .ql-editor .ql-indent-8:not(.ql-direction-rtl) {\n padding-left: 24em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-8:not(.ql-direction-rtl) {\n padding-right: 24em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-8:not(.ql-direction-rtl) {\n padding-left: 25.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-8:not(.ql-direction-rtl) {\n padding-right: 25.5em;\n}\n\n[dir=ltr] .ql-editor .ql-indent-8.ql-direction-rtl.ql-align-right {\n padding-right: 24em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-8.ql-direction-rtl.ql-align-right {\n padding-left: 24em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-8.ql-direction-rtl.ql-align-right {\n padding-right: 25.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-8.ql-direction-rtl.ql-align-right {\n padding-left: 25.5em;\n}\n\n[dir=ltr] .ql-editor .ql-indent-9:not(.ql-direction-rtl) {\n padding-left: 27em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-9:not(.ql-direction-rtl) {\n padding-right: 27em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-9:not(.ql-direction-rtl) {\n padding-left: 28.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-9:not(.ql-direction-rtl) {\n padding-right: 28.5em;\n}\n\n[dir=ltr] .ql-editor .ql-indent-9.ql-direction-rtl.ql-align-right {\n padding-right: 27em;\n}\n\n[dir=rtl] .ql-editor .ql-indent-9.ql-direction-rtl.ql-align-right {\n padding-left: 27em;\n}\n\n[dir=ltr] .ql-editor li.ql-indent-9.ql-direction-rtl.ql-align-right {\n padding-right: 28.5em;\n}\n\n[dir=rtl] .ql-editor li.ql-indent-9.ql-direction-rtl.ql-align-right {\n padding-left: 28.5em;\n}\n\n.ql-editor .ql-video {\n display: block;\n max-width: 100%;\n}\n\n[dir] .ql-editor .ql-video.ql-align-center {\n margin: 0 auto;\n}\n\n[dir=ltr] .ql-editor .ql-video.ql-align-right {\n margin: 0 0 0 auto;\n}\n\n[dir=rtl] .ql-editor .ql-video.ql-align-right {\n margin: 0 auto 0 0;\n}\n\n[dir] .ql-editor .ql-bg-black {\n 
background-color: #000;\n}\n\n[dir] .ql-editor .ql-bg-red {\n background-color: #e60000;\n}\n\n[dir] .ql-editor .ql-bg-orange {\n background-color: #f90;\n}\n\n[dir] .ql-editor .ql-bg-yellow {\n background-color: #ff0;\n}\n\n[dir] .ql-editor .ql-bg-green {\n background-color: #008a00;\n}\n\n[dir] .ql-editor .ql-bg-blue {\n background-color: #06c;\n}\n\n[dir] .ql-editor .ql-bg-purple {\n background-color: #93f;\n}\n\n.ql-editor .ql-color-white {\n color: #fff;\n}\n\n.ql-editor .ql-color-red {\n color: #e60000;\n}\n\n.ql-editor .ql-color-orange {\n color: #f90;\n}\n\n.ql-editor .ql-color-yellow {\n color: #ff0;\n}\n\n.ql-editor .ql-color-green {\n color: #008a00;\n}\n\n.ql-editor .ql-color-blue {\n color: #06c;\n}\n\n.ql-editor .ql-color-purple {\n color: #93f;\n}\n\n.ql-editor .ql-font-serif {\n font-family: Georgia, Times New Roman, serif;\n}\n\n.ql-editor .ql-font-monospace {\n font-family: Monaco, Courier New, monospace;\n}\n\n.ql-editor .ql-size-small {\n font-size: 0.75em;\n}\n\n.ql-editor .ql-size-large {\n font-size: 1.5em;\n}\n\n.ql-editor .ql-size-huge {\n font-size: 2.5em;\n}\n\n[dir] .ql-editor .ql-direction-rtl {\n text-align: inherit;\n}\n\n[dir=ltr] .ql-editor .ql-direction-rtl {\n direction: rtl;\n}\n\n[dir=rtl] .ql-editor .ql-direction-rtl {\n direction: ltr;\n}\n\n[dir] .ql-editor .ql-align-center {\n text-align: center;\n}\n\n[dir] .ql-editor .ql-align-justify {\n text-align: justify;\n}\n\n[dir=ltr] .ql-editor .ql-align-right {\n text-align: right;\n}\n\n[dir=rtl] .ql-editor .ql-align-right {\n text-align: left;\n}\n\n.ql-editor.ql-blank::before {\n color: rgba(0,0,0,0.6);\n content: attr(data-placeholder);\n font-style: italic;\n pointer-events: none;\n position: absolute;\n}\n\n[dir=ltr] .ql-editor.ql-blank::before {\n left: 15px;\n right: 15px;\n}\n\n[dir=rtl] .ql-editor.ql-blank::before {\n right: 15px;\n left: 15px;\n}\n\n.ql-bubble.ql-toolbar:after,\n.ql-bubble .ql-toolbar:after {\n content: '';\n display: table;\n}\n\n[dir] .ql-bubble.ql-toolbar:after, [dir] .ql-bubble .ql-toolbar:after {\n clear: both;\n}\n\n.ql-bubble.ql-toolbar button,\n.ql-bubble .ql-toolbar button {\n display: inline-block;\n height: 24px;\n width: 28px;\n}\n\n[dir] .ql-bubble.ql-toolbar button, [dir] .ql-bubble .ql-toolbar button {\n background: none;\n border: none;\n cursor: pointer;\n padding: 3px 5px;\n}\n\n[dir=ltr] .ql-bubble.ql-toolbar button, [dir=ltr] .ql-bubble .ql-toolbar button {\n float: left;\n}\n\n[dir=rtl] .ql-bubble.ql-toolbar button, [dir=rtl] .ql-bubble .ql-toolbar button {\n float: right;\n}\n\n.ql-bubble.ql-toolbar button svg,\n.ql-bubble .ql-toolbar button svg {\n height: 100%;\n}\n\n[dir=ltr] .ql-bubble.ql-toolbar button svg, [dir=ltr] .ql-bubble .ql-toolbar button svg {\n float: left;\n}\n\n[dir=rtl] .ql-bubble.ql-toolbar button svg, [dir=rtl] .ql-bubble .ql-toolbar button svg {\n float: right;\n}\n\n.ql-bubble.ql-toolbar button:active:hover,\n.ql-bubble .ql-toolbar button:active:hover {\n outline: none;\n}\n\n.ql-bubble.ql-toolbar input.ql-image[type=file],\n.ql-bubble .ql-toolbar input.ql-image[type=file] {\n display: none;\n}\n\n.ql-bubble.ql-toolbar button:hover,\n.ql-bubble .ql-toolbar button:hover,\n.ql-bubble.ql-toolbar button:focus,\n.ql-bubble .ql-toolbar button:focus,\n.ql-bubble.ql-toolbar button.ql-active,\n.ql-bubble .ql-toolbar button.ql-active,\n.ql-bubble.ql-toolbar .ql-picker-label:hover,\n.ql-bubble .ql-toolbar .ql-picker-label:hover,\n.ql-bubble.ql-toolbar .ql-picker-label.ql-active,\n.ql-bubble .ql-toolbar 
.ql-picker-label.ql-active,\n.ql-bubble.ql-toolbar .ql-picker-item:hover,\n.ql-bubble .ql-toolbar .ql-picker-item:hover,\n.ql-bubble.ql-toolbar .ql-picker-item.ql-selected,\n.ql-bubble .ql-toolbar .ql-picker-item.ql-selected {\n color: #fff;\n}\n\n.ql-bubble.ql-toolbar button:hover .ql-fill,\n.ql-bubble .ql-toolbar button:hover .ql-fill,\n.ql-bubble.ql-toolbar button:focus .ql-fill,\n.ql-bubble .ql-toolbar button:focus .ql-fill,\n.ql-bubble.ql-toolbar button.ql-active .ql-fill,\n.ql-bubble .ql-toolbar button.ql-active .ql-fill,\n.ql-bubble.ql-toolbar .ql-picker-label:hover .ql-fill,\n.ql-bubble .ql-toolbar .ql-picker-label:hover .ql-fill,\n.ql-bubble.ql-toolbar .ql-picker-label.ql-active .ql-fill,\n.ql-bubble .ql-toolbar .ql-picker-label.ql-active .ql-fill,\n.ql-bubble.ql-toolbar .ql-picker-item:hover .ql-fill,\n.ql-bubble .ql-toolbar .ql-picker-item:hover .ql-fill,\n.ql-bubble.ql-toolbar .ql-picker-item.ql-selected .ql-fill,\n.ql-bubble .ql-toolbar .ql-picker-item.ql-selected .ql-fill,\n.ql-bubble.ql-toolbar button:hover .ql-stroke.ql-fill,\n.ql-bubble .ql-toolbar button:hover .ql-stroke.ql-fill,\n.ql-bubble.ql-toolbar button:focus .ql-stroke.ql-fill,\n.ql-bubble .ql-toolbar button:focus .ql-stroke.ql-fill,\n.ql-bubble.ql-toolbar button.ql-active .ql-stroke.ql-fill,\n.ql-bubble .ql-toolbar button.ql-active .ql-stroke.ql-fill,\n.ql-bubble.ql-toolbar .ql-picker-label:hover .ql-stroke.ql-fill,\n.ql-bubble .ql-toolbar .ql-picker-label:hover .ql-stroke.ql-fill,\n.ql-bubble.ql-toolbar .ql-picker-label.ql-active .ql-stroke.ql-fill,\n.ql-bubble .ql-toolbar .ql-picker-label.ql-active .ql-stroke.ql-fill,\n.ql-bubble.ql-toolbar .ql-picker-item:hover .ql-stroke.ql-fill,\n.ql-bubble .ql-toolbar .ql-picker-item:hover .ql-stroke.ql-fill,\n.ql-bubble.ql-toolbar .ql-picker-item.ql-selected .ql-stroke.ql-fill,\n.ql-bubble .ql-toolbar .ql-picker-item.ql-selected .ql-stroke.ql-fill {\n fill: #fff;\n}\n\n.ql-bubble.ql-toolbar button:hover .ql-stroke,\n.ql-bubble .ql-toolbar button:hover .ql-stroke,\n.ql-bubble.ql-toolbar button:focus .ql-stroke,\n.ql-bubble .ql-toolbar button:focus .ql-stroke,\n.ql-bubble.ql-toolbar button.ql-active .ql-stroke,\n.ql-bubble .ql-toolbar button.ql-active .ql-stroke,\n.ql-bubble.ql-toolbar .ql-picker-label:hover .ql-stroke,\n.ql-bubble .ql-toolbar .ql-picker-label:hover .ql-stroke,\n.ql-bubble.ql-toolbar .ql-picker-label.ql-active .ql-stroke,\n.ql-bubble .ql-toolbar .ql-picker-label.ql-active .ql-stroke,\n.ql-bubble.ql-toolbar .ql-picker-item:hover .ql-stroke,\n.ql-bubble .ql-toolbar .ql-picker-item:hover .ql-stroke,\n.ql-bubble.ql-toolbar .ql-picker-item.ql-selected .ql-stroke,\n.ql-bubble .ql-toolbar .ql-picker-item.ql-selected .ql-stroke,\n.ql-bubble.ql-toolbar button:hover .ql-stroke-miter,\n.ql-bubble .ql-toolbar button:hover .ql-stroke-miter,\n.ql-bubble.ql-toolbar button:focus .ql-stroke-miter,\n.ql-bubble .ql-toolbar button:focus .ql-stroke-miter,\n.ql-bubble.ql-toolbar button.ql-active .ql-stroke-miter,\n.ql-bubble .ql-toolbar button.ql-active .ql-stroke-miter,\n.ql-bubble.ql-toolbar .ql-picker-label:hover .ql-stroke-miter,\n.ql-bubble .ql-toolbar .ql-picker-label:hover .ql-stroke-miter,\n.ql-bubble.ql-toolbar .ql-picker-label.ql-active .ql-stroke-miter,\n.ql-bubble .ql-toolbar .ql-picker-label.ql-active .ql-stroke-miter,\n.ql-bubble.ql-toolbar .ql-picker-item:hover .ql-stroke-miter,\n.ql-bubble .ql-toolbar .ql-picker-item:hover .ql-stroke-miter,\n.ql-bubble.ql-toolbar .ql-picker-item.ql-selected .ql-stroke-miter,\n.ql-bubble .ql-toolbar .ql-picker-item.ql-selected 
.ql-stroke-miter {\n stroke: #fff;\n}\n\n@media (pointer: coarse) {\n .ql-bubble.ql-toolbar button:hover:not(.ql-active),\n .ql-bubble .ql-toolbar button:hover:not(.ql-active) {\n color: #ccc;\n }\n\n .ql-bubble.ql-toolbar button:hover:not(.ql-active) .ql-fill,\n .ql-bubble .ql-toolbar button:hover:not(.ql-active) .ql-fill,\n .ql-bubble.ql-toolbar button:hover:not(.ql-active) .ql-stroke.ql-fill,\n .ql-bubble .ql-toolbar button:hover:not(.ql-active) .ql-stroke.ql-fill {\n fill: #ccc;\n }\n\n .ql-bubble.ql-toolbar button:hover:not(.ql-active) .ql-stroke,\n .ql-bubble .ql-toolbar button:hover:not(.ql-active) .ql-stroke,\n .ql-bubble.ql-toolbar button:hover:not(.ql-active) .ql-stroke-miter,\n .ql-bubble .ql-toolbar button:hover:not(.ql-active) .ql-stroke-miter {\n stroke: #ccc;\n }\n}\n\n.ql-bubble {\n box-sizing: border-box;\n}\n\n.ql-bubble * {\n box-sizing: border-box;\n}\n\n.ql-bubble .ql-hidden {\n display: none;\n}\n\n.ql-bubble .ql-out-bottom,\n.ql-bubble .ql-out-top {\n visibility: hidden;\n}\n\n.ql-bubble .ql-tooltip {\n position: absolute;\n}\n\n[dir] .ql-bubble .ql-tooltip {\n transform: translateY(10px);\n}\n\n.ql-bubble .ql-tooltip a {\n text-decoration: none;\n}\n\n[dir] .ql-bubble .ql-tooltip a {\n cursor: pointer;\n}\n\n[dir] .ql-bubble .ql-tooltip.ql-flip {\n transform: translateY(-10px);\n}\n\n.ql-bubble .ql-formats {\n display: inline-block;\n vertical-align: middle;\n}\n\n.ql-bubble .ql-formats:after {\n content: '';\n display: table;\n}\n\n[dir] .ql-bubble .ql-formats:after {\n clear: both;\n}\n\n.ql-bubble .ql-stroke {\n fill: none;\n stroke: #ccc;\n stroke-linecap: round;\n stroke-linejoin: round;\n stroke-width: 2;\n}\n\n.ql-bubble .ql-stroke-miter {\n fill: none;\n stroke: #ccc;\n stroke-miterlimit: 10;\n stroke-width: 2;\n}\n\n.ql-bubble .ql-fill,\n.ql-bubble .ql-stroke.ql-fill {\n fill: #ccc;\n}\n\n.ql-bubble .ql-empty {\n fill: none;\n}\n\n.ql-bubble .ql-even {\n fill-rule: evenodd;\n}\n\n.ql-bubble .ql-thin,\n.ql-bubble .ql-stroke.ql-thin {\n stroke-width: 1;\n}\n\n.ql-bubble .ql-transparent {\n opacity: 0.4;\n}\n\n.ql-bubble .ql-direction svg:last-child {\n display: none;\n}\n\n.ql-bubble .ql-direction.ql-active svg:last-child {\n display: inline;\n}\n\n.ql-bubble .ql-direction.ql-active svg:first-child {\n display: none;\n}\n\n.ql-bubble .ql-editor h1 {\n font-size: 2em;\n}\n\n.ql-bubble .ql-editor h2 {\n font-size: 1.5em;\n}\n\n.ql-bubble .ql-editor h3 {\n font-size: 1.17em;\n}\n\n.ql-bubble .ql-editor h4 {\n font-size: 1em;\n}\n\n.ql-bubble .ql-editor h5 {\n font-size: 0.83em;\n}\n\n.ql-bubble .ql-editor h6 {\n font-size: 0.67em;\n}\n\n.ql-bubble .ql-editor a {\n text-decoration: underline;\n}\n\n[dir] .ql-bubble .ql-editor blockquote {\n margin-bottom: 5px;\n margin-top: 5px;\n}\n\n[dir=ltr] .ql-bubble .ql-editor blockquote {\n border-left: 4px solid #ccc;\n padding-left: 16px;\n}\n\n[dir=rtl] .ql-bubble .ql-editor blockquote {\n border-right: 4px solid #ccc;\n padding-right: 16px;\n}\n\n[dir] .ql-bubble .ql-editor code, [dir] .ql-bubble .ql-editor pre {\n background-color: #f0f0f0;\n border-radius: 3px;\n}\n\n.ql-bubble .ql-editor pre {\n white-space: pre-wrap;\n}\n\n[dir] .ql-bubble .ql-editor pre {\n margin-bottom: 5px;\n margin-top: 5px;\n padding: 5px 10px;\n}\n\n.ql-bubble .ql-editor code {\n font-size: 85%;\n}\n\n[dir] .ql-bubble .ql-editor code {\n padding: 2px 4px;\n}\n\n.ql-bubble .ql-editor pre.ql-syntax {\n color: #f8f8f2;\n overflow: visible;\n}\n\n[dir] .ql-bubble .ql-editor pre.ql-syntax {\n background-color: #23241f;\n}\n\n.ql-bubble .ql-editor 
img {\n max-width: 100%;\n}\n\n.ql-bubble .ql-picker {\n color: #ccc;\n display: inline-block;\n font-size: 14px;\n font-weight: 500;\n height: 24px;\n position: relative;\n vertical-align: middle;\n}\n\n[dir=ltr] .ql-bubble .ql-picker {\n float: left;\n}\n\n[dir=rtl] .ql-bubble .ql-picker {\n float: right;\n}\n\n.ql-bubble .ql-picker-label {\n display: inline-block;\n height: 100%;\n position: relative;\n width: 100%;\n}\n\n[dir] .ql-bubble .ql-picker-label {\n cursor: pointer;\n}\n\n[dir=ltr] .ql-bubble .ql-picker-label {\n padding-left: 8px;\n padding-right: 2px;\n}\n\n[dir=rtl] .ql-bubble .ql-picker-label {\n padding-right: 8px;\n padding-left: 2px;\n}\n\n.ql-bubble .ql-picker-label::before {\n display: inline-block;\n line-height: 22px;\n}\n\n.ql-bubble .ql-picker-options {\n display: none;\n min-width: 100%;\n position: absolute;\n white-space: nowrap;\n}\n\n[dir] .ql-bubble .ql-picker-options {\n background-color: #444;\n padding: 4px 8px;\n}\n\n.ql-bubble .ql-picker-options .ql-picker-item {\n display: block;\n}\n\n[dir] .ql-bubble .ql-picker-options .ql-picker-item {\n cursor: pointer;\n padding-bottom: 5px;\n padding-top: 5px;\n}\n\n.ql-bubble .ql-picker.ql-expanded .ql-picker-label {\n color: #777;\n z-index: 2;\n}\n\n.ql-bubble .ql-picker.ql-expanded .ql-picker-label .ql-fill {\n fill: #777;\n}\n\n.ql-bubble .ql-picker.ql-expanded .ql-picker-label .ql-stroke {\n stroke: #777;\n}\n\n.ql-bubble .ql-picker.ql-expanded .ql-picker-options {\n display: block;\n top: 100%;\n z-index: 1;\n}\n\n[dir] .ql-bubble .ql-picker.ql-expanded .ql-picker-options {\n margin-top: -1px;\n}\n\n.ql-bubble .ql-color-picker,\n.ql-bubble .ql-icon-picker {\n width: 28px;\n}\n\n[dir] .ql-bubble .ql-color-picker .ql-picker-label, [dir] .ql-bubble .ql-icon-picker .ql-picker-label {\n padding: 2px 4px;\n}\n\n[dir=ltr] .ql-bubble .ql-color-picker .ql-picker-label svg, [dir=ltr] .ql-bubble .ql-icon-picker .ql-picker-label svg {\n right: 4px;\n}\n\n[dir=rtl] .ql-bubble .ql-color-picker .ql-picker-label svg, [dir=rtl] .ql-bubble .ql-icon-picker .ql-picker-label svg {\n left: 4px;\n}\n\n[dir] .ql-bubble .ql-icon-picker .ql-picker-options {\n padding: 4px 0px;\n}\n\n.ql-bubble .ql-icon-picker .ql-picker-item {\n height: 24px;\n width: 24px;\n}\n\n[dir] .ql-bubble .ql-icon-picker .ql-picker-item {\n padding: 2px 4px;\n}\n\n.ql-bubble .ql-color-picker .ql-picker-options {\n width: 152px;\n}\n\n[dir] .ql-bubble .ql-color-picker .ql-picker-options {\n padding: 3px 5px;\n}\n\n.ql-bubble .ql-color-picker .ql-picker-item {\n height: 16px;\n width: 16px;\n}\n\n[dir] .ql-bubble .ql-color-picker .ql-picker-item {\n border: 1px solid transparent;\n margin: 2px;\n padding: 0px;\n}\n\n[dir=ltr] .ql-bubble .ql-color-picker .ql-picker-item {\n float: left;\n}\n\n[dir=rtl] .ql-bubble .ql-color-picker .ql-picker-item {\n float: right;\n}\n\n.ql-bubble .ql-picker:not(.ql-color-picker):not(.ql-icon-picker) svg {\n position: absolute;\n top: 50%;\n width: 18px;\n}\n\n[dir] .ql-bubble .ql-picker:not(.ql-color-picker):not(.ql-icon-picker) svg {\n margin-top: -9px;\n}\n\n[dir=ltr] .ql-bubble .ql-picker:not(.ql-color-picker):not(.ql-icon-picker) svg {\n right: 0;\n}\n\n[dir=rtl] .ql-bubble .ql-picker:not(.ql-color-picker):not(.ql-icon-picker) svg {\n left: 0;\n}\n\n.ql-bubble .ql-picker.ql-header .ql-picker-label[data-label]:not([data-label=''])::before,\n.ql-bubble .ql-picker.ql-font .ql-picker-label[data-label]:not([data-label=''])::before,\n.ql-bubble .ql-picker.ql-size 
.ql-picker-label[data-label]:not([data-label=''])::before,\n.ql-bubble .ql-picker.ql-header .ql-picker-item[data-label]:not([data-label=''])::before,\n.ql-bubble .ql-picker.ql-font .ql-picker-item[data-label]:not([data-label=''])::before,\n.ql-bubble .ql-picker.ql-size .ql-picker-item[data-label]:not([data-label=''])::before {\n content: attr(data-label);\n}\n\n.ql-bubble .ql-picker.ql-header {\n width: 98px;\n}\n\n.ql-bubble .ql-picker.ql-header .ql-picker-label::before,\n.ql-bubble .ql-picker.ql-header .ql-picker-item::before {\n content: 'Normal';\n}\n\n.ql-bubble .ql-picker.ql-header .ql-picker-label[data-value=\"1\"]::before,\n.ql-bubble .ql-picker.ql-header .ql-picker-item[data-value=\"1\"]::before {\n content: 'Heading 1';\n}\n\n.ql-bubble .ql-picker.ql-header .ql-picker-label[data-value=\"2\"]::before,\n.ql-bubble .ql-picker.ql-header .ql-picker-item[data-value=\"2\"]::before {\n content: 'Heading 2';\n}\n\n.ql-bubble .ql-picker.ql-header .ql-picker-label[data-value=\"3\"]::before,\n.ql-bubble .ql-picker.ql-header .ql-picker-item[data-value=\"3\"]::before {\n content: 'Heading 3';\n}\n\n.ql-bubble .ql-picker.ql-header .ql-picker-label[data-value=\"4\"]::before,\n.ql-bubble .ql-picker.ql-header .ql-picker-item[data-value=\"4\"]::before {\n content: 'Heading 4';\n}\n\n.ql-bubble .ql-picker.ql-header .ql-picker-label[data-value=\"5\"]::before,\n.ql-bubble .ql-picker.ql-header .ql-picker-item[data-value=\"5\"]::before {\n content: 'Heading 5';\n}\n\n.ql-bubble .ql-picker.ql-header .ql-picker-label[data-value=\"6\"]::before,\n.ql-bubble .ql-picker.ql-header .ql-picker-item[data-value=\"6\"]::before {\n content: 'Heading 6';\n}\n\n.ql-bubble .ql-picker.ql-header .ql-picker-item[data-value=\"1\"]::before {\n font-size: 2em;\n}\n\n.ql-bubble .ql-picker.ql-header .ql-picker-item[data-value=\"2\"]::before {\n font-size: 1.5em;\n}\n\n.ql-bubble .ql-picker.ql-header .ql-picker-item[data-value=\"3\"]::before {\n font-size: 1.17em;\n}\n\n.ql-bubble .ql-picker.ql-header .ql-picker-item[data-value=\"4\"]::before {\n font-size: 1em;\n}\n\n.ql-bubble .ql-picker.ql-header .ql-picker-item[data-value=\"5\"]::before {\n font-size: 0.83em;\n}\n\n.ql-bubble .ql-picker.ql-header .ql-picker-item[data-value=\"6\"]::before {\n font-size: 0.67em;\n}\n\n.ql-bubble .ql-picker.ql-font {\n width: 108px;\n}\n\n.ql-bubble .ql-picker.ql-font .ql-picker-label::before,\n.ql-bubble .ql-picker.ql-font .ql-picker-item::before {\n content: 'Sans Serif';\n}\n\n.ql-bubble .ql-picker.ql-font .ql-picker-label[data-value=serif]::before,\n.ql-bubble .ql-picker.ql-font .ql-picker-item[data-value=serif]::before {\n content: 'Serif';\n}\n\n.ql-bubble .ql-picker.ql-font .ql-picker-label[data-value=monospace]::before,\n.ql-bubble .ql-picker.ql-font .ql-picker-item[data-value=monospace]::before {\n content: 'Monospace';\n}\n\n.ql-bubble .ql-picker.ql-font .ql-picker-item[data-value=serif]::before {\n font-family: Georgia, Times New Roman, serif;\n}\n\n.ql-bubble .ql-picker.ql-font .ql-picker-item[data-value=monospace]::before {\n font-family: Monaco, Courier New, monospace;\n}\n\n.ql-bubble .ql-picker.ql-size {\n width: 98px;\n}\n\n.ql-bubble .ql-picker.ql-size .ql-picker-label::before,\n.ql-bubble .ql-picker.ql-size .ql-picker-item::before {\n content: 'Normal';\n}\n\n.ql-bubble .ql-picker.ql-size .ql-picker-label[data-value=small]::before,\n.ql-bubble .ql-picker.ql-size .ql-picker-item[data-value=small]::before {\n content: 'Small';\n}\n\n.ql-bubble .ql-picker.ql-size .ql-picker-label[data-value=large]::before,\n.ql-bubble 
.ql-picker.ql-size .ql-picker-item[data-value=large]::before {\n content: 'Large';\n}\n\n.ql-bubble .ql-picker.ql-size .ql-picker-label[data-value=huge]::before,\n.ql-bubble .ql-picker.ql-size .ql-picker-item[data-value=huge]::before {\n content: 'Huge';\n}\n\n.ql-bubble .ql-picker.ql-size .ql-picker-item[data-value=small]::before {\n font-size: 10px;\n}\n\n.ql-bubble .ql-picker.ql-size .ql-picker-item[data-value=large]::before {\n font-size: 18px;\n}\n\n.ql-bubble .ql-picker.ql-size .ql-picker-item[data-value=huge]::before {\n font-size: 32px;\n}\n\n[dir] .ql-bubble .ql-color-picker.ql-background .ql-picker-item {\n background-color: #fff;\n}\n\n[dir] .ql-bubble .ql-color-picker.ql-color .ql-picker-item {\n background-color: #000;\n}\n\n[dir=ltr] .ql-bubble .ql-toolbar .ql-formats {\n margin: 8px 12px 8px 0px;\n}\n\n[dir=rtl] .ql-bubble .ql-toolbar .ql-formats {\n margin: 8px 0px 8px 12px;\n}\n\n[dir=ltr] .ql-bubble .ql-toolbar .ql-formats:first-child {\n margin-left: 12px;\n}\n\n[dir=rtl] .ql-bubble .ql-toolbar .ql-formats:first-child {\n margin-right: 12px;\n}\n\n[dir] .ql-bubble .ql-color-picker svg {\n margin: 1px;\n}\n\n[dir] .ql-bubble .ql-color-picker .ql-picker-item.ql-selected, [dir] .ql-bubble .ql-color-picker .ql-picker-item:hover {\n border-color: #fff;\n}\n\n.ql-bubble .ql-tooltip {\n color: #fff;\n}\n\n[dir] .ql-bubble .ql-tooltip {\n background-color: #444;\n border-radius: 25px;\n}\n\n.ql-bubble .ql-tooltip-arrow {\n content: \" \";\n display: block;\n position: absolute;\n}\n\n[dir=ltr] .ql-bubble .ql-tooltip-arrow {\n border-left: 6px solid transparent;\n border-right: 6px solid transparent;\n left: 50%;\n margin-left: -6px;\n}\n\n[dir=rtl] .ql-bubble .ql-tooltip-arrow {\n border-right: 6px solid transparent;\n border-left: 6px solid transparent;\n right: 50%;\n margin-right: -6px;\n}\n\n.ql-bubble .ql-tooltip:not(.ql-flip) .ql-tooltip-arrow {\n top: -6px;\n}\n\n[dir] .ql-bubble .ql-tooltip:not(.ql-flip) .ql-tooltip-arrow {\n border-bottom: 6px solid #444;\n}\n\n.ql-bubble .ql-tooltip.ql-flip .ql-tooltip-arrow {\n bottom: -6px;\n}\n\n[dir] .ql-bubble .ql-tooltip.ql-flip .ql-tooltip-arrow {\n border-top: 6px solid #444;\n}\n\n.ql-bubble .ql-tooltip.ql-editing .ql-tooltip-editor {\n display: block;\n}\n\n.ql-bubble .ql-tooltip.ql-editing .ql-formats {\n visibility: hidden;\n}\n\n.ql-bubble .ql-tooltip-editor {\n display: none;\n}\n\n.ql-bubble .ql-tooltip-editor input[type=text] {\n color: #fff;\n font-size: 13px;\n height: 100%;\n outline: none;\n position: absolute;\n width: 100%;\n}\n\n[dir] .ql-bubble .ql-tooltip-editor input[type=text] {\n background: transparent;\n border: none;\n padding: 10px 20px;\n}\n\n.ql-bubble .ql-tooltip-editor a {\n top: 10px;\n position: absolute;\n}\n\n[dir=ltr] .ql-bubble .ql-tooltip-editor a {\n right: 20px;\n}\n\n[dir=rtl] .ql-bubble .ql-tooltip-editor a {\n left: 20px;\n}\n\n.ql-bubble .ql-tooltip-editor a:before {\n color: #ccc;\n content: \"\\D7\";\n font-size: 16px;\n font-weight: bold;\n}\n\n.ql-container.ql-bubble:not(.ql-disabled) a {\n position: relative;\n white-space: nowrap;\n}\n\n.ql-container.ql-bubble:not(.ql-disabled) a::before {\n top: -5px;\n font-size: 12px;\n color: #fff;\n content: attr(href);\n font-weight: normal;\n overflow: hidden;\n text-decoration: none;\n z-index: 1;\n}\n\n[dir] .ql-container.ql-bubble:not(.ql-disabled) a::before {\n background-color: #444;\n border-radius: 15px;\n padding: 5px 15px;\n}\n\n.ql-container.ql-bubble:not(.ql-disabled) a::after {\n top: 0;\n content: \" \";\n height: 0;\n width: 
0;\n}\n\n[dir] .ql-container.ql-bubble:not(.ql-disabled) a::after {\n border-top: 6px solid #444;\n}\n\n[dir=ltr] .ql-container.ql-bubble:not(.ql-disabled) a::after {\n border-left: 6px solid transparent;\n border-right: 6px solid transparent;\n}\n\n[dir=rtl] .ql-container.ql-bubble:not(.ql-disabled) a::after {\n border-right: 6px solid transparent;\n border-left: 6px solid transparent;\n}\n\n.ql-container.ql-bubble:not(.ql-disabled) a::before,\n.ql-container.ql-bubble:not(.ql-disabled) a::after {\n position: absolute;\n transition: visibility 0s ease 200ms;\n visibility: hidden;\n}\n\n[dir=ltr] .ql-container.ql-bubble:not(.ql-disabled) a::before, [dir=ltr] .ql-container.ql-bubble:not(.ql-disabled) a::after {\n left: 0;\n margin-left: 50%;\n transform: translate(-50%, -100%);\n}\n\n[dir=rtl] .ql-container.ql-bubble:not(.ql-disabled) a::before, [dir=rtl] .ql-container.ql-bubble:not(.ql-disabled) a::after {\n right: 0;\n margin-right: 50%;\n transform: translate(50%, -100%);\n}\n\n.ql-container.ql-bubble:not(.ql-disabled) a:hover::before,\n.ql-container.ql-bubble:not(.ql-disabled) a:hover::after {\n visibility: visible;\n}\n", ""]);
// Exports
module.exports = exports;
/***/ }),
/***/ "./node_modules/quill/dist/quill.bubble.css":
/*!**************************************************!*\
!*** ./node_modules/quill/dist/quill.bubble.css ***!
\**************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
var content = __webpack_require__(/*! !../../css-loader/dist/cjs.js??ref--7-1!../../postcss-loader/src??ref--7-2!./quill.bubble.css */ "./node_modules/css-loader/dist/cjs.js?!./node_modules/postcss-loader/src/index.js?!./node_modules/quill/dist/quill.bubble.css");
if(typeof content === 'string') content = [[module.i, content, '']];
var transform;
var insertInto;
var options = {"hmr":true}
options.transform = transform
options.insertInto = undefined;
var update = __webpack_require__(/*! ../../style-loader/lib/addStyles.js */ "./node_modules/style-loader/lib/addStyles.js")(content, options);
if(content.locals) module.exports = content.locals;
if(false) {}
/***/ })
}]);
Opengauss_Function_Keyword_Reloptions_Case0020.py | """
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
'''
#-- @testpoint: openGauss keyword reloptions (non-reserved), used as a directory object name
'''
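# For reference, a sketch of the statement shapes exercised below (exact
# server messages depend on the openGauss deployment):
#   create directory reloptions as '/tmp/';    -- unquoted keyword: succeeds
#   create directory "reloptions" as '/tmp/';  -- double-quoted: succeeds
#   create directory 'reloptions' as '/tmp/';  -- single-quoted: syntax error
#   create directory `reloptions` as '/tmp/';  -- backquoted: syntax error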
import unittest
from testcase.utils.Logger import Logger
from testcase.utils.Constant import Constant
from testcase.utils.CommonSH import CommonSH
logger = Logger()
commonsh = CommonSH('dbuser')
constant = Constant()
class Hostname(unittest.TestCase):
def setUp(self):
logger.info("------------------------ Opengauss_Function_Keyword_Reloptions_Case0020 开始执行--------------------------")
    # keyword as directory object name without double quotes - succeeds
def test_reloptions_1(self):
SqlMdg = commonsh.execut_db_sql('''create directory reloptions as '/tmp/';
drop directory reloptions;''')
logger.info(SqlMdg)
self.assertIn(constant.CREATE_DIRECTORY_SUCCESS_MSG, SqlMdg)
self.assertIn(constant.DROP_DIRECTORY_SUCCESS_MSG, SqlMdg)
    # keyword as directory object name with double quotes - succeeds
def test_reloptions_2(self):
SqlMdg = commonsh.execut_db_sql('''create directory "reloptions" as '/tmp/';
| ''drop directory if exists 'reloptions';''')
logger.info(SqlMdg)
self.assertIn(constant.SYNTAX_ERROR_MSG, SqlMdg)
SqlMdg = commonsh.execut_db_sql(''' create directory 'reloptions' as '/tmp/';''')
logger.info(SqlMdg)
self.assertIn(constant.SYNTAX_ERROR_MSG, SqlMdg)
#关键字作为目录对象名带反引号 - 合理报错
def test_reloptions_4(self):
SqlMdg = commonsh.execut_db_sql('''drop directory if exists \`reloptions\`;''')
logger.info(SqlMdg)
self.assertIn(constant.SYNTAX_ERROR_MSG, SqlMdg)
SqlMdg = commonsh.execut_db_sql('''create directory \`reloptions\` as '/tmp/';''')
logger.info(SqlMdg)
self.assertIn(constant.SYNTAX_ERROR_MSG, SqlMdg)
def tearDown(self):
logger.info('------------------------ Opengauss_Function_Keyword_Reloptions_Case0020 执行结束--------------------------') | drop directory "reloptions";''')
logger.info(SqlMdg)
self.assertIn(constant.CREATE_DIRECTORY_SUCCESS_MSG, SqlMdg)
self.assertIn(constant.DROP_DIRECTORY_SUCCESS_MSG, SqlMdg)
# 关键字作为目录对象名带单引号 - 合理报错
def test_reloptions_3(self):
SqlMdg = commonsh.execut_db_sql(' |
parser.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub use self::PathParsingMode::*;
use abi;
use ast::BareFnTy;
use ast::{RegionTyParamBound, TraitTyParamBound, TraitBoundModifier};
use ast::{Public, Unsafety};
use ast::{Mod, BiAdd, Arg, Arm, Attribute, BindByRef, BindByValue};
use ast::{BiBitAnd, BiBitOr, BiBitXor, BiRem, BiLt, BiGt, Block};
use ast::{BlockCheckMode, CaptureByRef, CaptureByValue, CaptureClause};
use ast::{ConstImplItem, ConstTraitItem, Crate, CrateConfig};
use ast::{Decl, DeclItem, DeclLocal, DefaultBlock, DefaultReturn};
use ast::{UnDeref, BiDiv, EMPTY_CTXT, EnumDef, ExplicitSelf};
use ast::{Expr, Expr_, ExprAddrOf, ExprMatch, ExprAgain};
use ast::{ExprAssign, ExprAssignOp, ExprBinary, ExprBlock, ExprBox};
use ast::{ExprBreak, ExprCall, ExprCast};
use ast::{ExprField, ExprTupField, ExprClosure, ExprIf, ExprIfLet, ExprIndex};
use ast::{ExprLit, ExprLoop, ExprMac, ExprRange};
use ast::{ExprMethodCall, ExprParen, ExprPath};
use ast::{ExprRepeat, ExprRet, ExprStruct, ExprTup, ExprUnary};
use ast::{ExprVec, ExprWhile, ExprWhileLet, ExprForLoop, Field, FnDecl};
use ast::{ForeignItem, ForeignItemStatic, ForeignItemFn, ForeignMod, FunctionRetTy};
use ast::{Ident, Inherited, ImplItem, Item, Item_, ItemStatic};
use ast::{ItemEnum, ItemFn, ItemForeignMod, ItemImpl, ItemConst};
use ast::{ItemMac, ItemMod, ItemStruct, ItemTrait, ItemTy, ItemDefaultImpl};
use ast::{ItemExternCrate, ItemUse};
use ast::{LifetimeDef, Lit, Lit_};
use ast::{LitBool, LitChar, LitByte, LitBinary};
use ast::{LitStr, LitInt, Local, LocalLet};
use ast::{MacStmtWithBraces, MacStmtWithSemicolon, MacStmtWithoutBraces};
use ast::{MutImmutable, MutMutable, Mac_, MacInvocTT, MatchSource};
use ast::{MutTy, BiMul, Mutability};
use ast::{MethodImplItem, NamedField, UnNeg, NoReturn, UnNot};
use ast::{Pat, PatBox, PatEnum, PatIdent, PatLit, PatQPath, PatMac, PatRange};
use ast::{PatRegion, PatStruct, PatTup, PatVec, PatWild, PatWildMulti};
use ast::PatWildSingle;
use ast::{PolyTraitRef, QSelf};
use ast::{Return, BiShl, BiShr, Stmt, StmtDecl};
use ast::{StmtExpr, StmtSemi, StmtMac, StructDef, StructField};
use ast::{StructVariantKind, BiSub, StrStyle};
use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue};
use ast::{Delimited, SequenceRepetition, TokenTree, TraitItem, TraitRef};
use ast::{TtDelimited, TtSequence, TtToken};
use ast::{TupleVariantKind, Ty, Ty_, TypeBinding};
use ast::{TyFixedLengthVec, TyBareFn, TyTypeof, TyInfer};
use ast::{TyParam, TyParamBound, TyParen, TyPath, TyPolyTraitRef, TyPtr};
use ast::{TyRptr, TyTup, TyU32, TyVec, UnUniq};
use ast::{TypeImplItem, TypeTraitItem};
use ast::{UnnamedField, UnsafeBlock};
use ast::{ViewPath, ViewPathGlob, ViewPathList, ViewPathSimple};
use ast::{Visibility, WhereClause};
use ast;
use ast_util::{self, AS_PREC, ident_to_path, operator_prec};
use codemap::{self, Span, BytePos, Spanned, spanned, mk_sp};
use diagnostic;
use ext::tt::macro_parser;
use parse;
use parse::attr::ParserAttr;
use parse::classify;
use parse::common::{SeqSep, seq_sep_none, seq_sep_trailing_allowed};
use parse::lexer::{Reader, TokenAndSpan};
use parse::obsolete::{ParserObsoleteMethods, ObsoleteSyntax};
use parse::token::{self, MatchNt, SubstNt, SpecialVarNt, InternedString};
use parse::token::{keywords, special_idents, SpecialMacroVar};
use parse::{new_sub_parser_from_file, ParseSess};
use print::pprust;
use ptr::P;
use owned_slice::OwnedSlice;
use parse::PResult;
use diagnostic::FatalError;
use std::collections::HashSet;
use std::fs;
use std::io::prelude::*;
use std::mem;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::slice;
bitflags! {
flags Restrictions: u8 {
const UNRESTRICTED = 0b0000,
const RESTRICTION_STMT_EXPR = 0b0001,
const RESTRICTION_NO_STRUCT_LITERAL = 0b0010,
}
}
type ItemInfo = (Ident, Item_, Option<Vec<Attribute> >);
/// How to parse a path. There are four different kinds of paths, all of which
/// are parsed somewhat differently.
#[derive(Copy, Clone, PartialEq)]
pub enum PathParsingMode {
/// A path with no type parameters; e.g. `foo::bar::Baz`
NoTypesAllowed,
/// A path with a lifetime and type parameters, with no double colons
/// before the type parameters; e.g. `foo::bar<'a>::Baz<T>`
LifetimeAndTypesWithoutColons,
/// A path with a lifetime and type parameters with double colons before
/// the type parameters; e.g. `foo::bar::<'a>::Baz::<T>`
LifetimeAndTypesWithColons,
}
/// How to parse a qualified path, whether to allow trailing parameters.
#[derive(Copy, Clone, PartialEq)]
pub enum QPathParsingMode {
/// No trailing parameters, e.g. `<T as Trait>::Item`
NoParameters,
/// Optional parameters, e.g. `<T as Trait>::item::<'a, U>`
MaybeParameters,
}
/// How to parse a bound, whether to allow bound modifiers such as `?`.
#[derive(Copy, Clone, PartialEq)]
pub enum BoundParsingMode {
Bare,
Modified,
}
/// Possibly accept a `token::Interpolated` expression (a pre-parsed expression
/// dropped into the token stream, which happens while parsing the result of
/// macro expansion). Placement of these is not as complex as I feared it would
/// be. The important thing is to make sure that lookahead doesn't balk at
/// `token::Interpolated` tokens.
macro_rules! maybe_whole_expr {
($p:expr) => (
{
let found = match $p.token {
token::Interpolated(token::NtExpr(ref e)) => {
Some((*e).clone())
}
token::Interpolated(token::NtPath(_)) => {
// FIXME: The following avoids an issue with lexical borrowck scopes,
// but the clone is unfortunate.
let pt = match $p.token {
token::Interpolated(token::NtPath(ref pt)) => (**pt).clone(),
_ => unreachable!()
};
let span = $p.span;
Some($p.mk_expr(span.lo, span.hi, ExprPath(None, pt)))
}
token::Interpolated(token::NtBlock(_)) => {
// FIXME: The following avoids an issue with lexical borrowck scopes,
// but the clone is unfortunate.
let b = match $p.token {
token::Interpolated(token::NtBlock(ref b)) => (*b).clone(),
_ => unreachable!()
};
let span = $p.span;
Some($p.mk_expr(span.lo, span.hi, ExprBlock(b)))
}
_ => None
};
match found {
Some(e) => {
try!($p.bump());
return Ok(e);
}
None => ()
}
}
)
}
/// As maybe_whole_expr, but for things other than expressions
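/// e.g. `maybe_whole!(no_clone p, NtTraitItem)` makes the enclosing function
/// return the interpolated trait item early when one is the current token.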
macro_rules! maybe_whole {
($p:expr, $constructor:ident) => (
{
let found = match ($p).token {
token::Interpolated(token::$constructor(_)) => {
Some(try!(($p).bump_and_get()))
}
_ => None
};
if let Some(token::Interpolated(token::$constructor(x))) = found {
return Ok(x.clone());
}
}
);
(no_clone $p:expr, $constructor:ident) => (
{
let found = match ($p).token {
token::Interpolated(token::$constructor(_)) => {
Some(try!(($p).bump_and_get()))
}
_ => None
};
if let Some(token::Interpolated(token::$constructor(x))) = found {
return Ok(x);
}
}
);
(deref $p:expr, $constructor:ident) => (
{
let found = match ($p).token {
token::Interpolated(token::$constructor(_)) => {
Some(try!(($p).bump_and_get()))
}
_ => None
};
if let Some(token::Interpolated(token::$constructor(x))) = found {
return Ok((*x).clone());
}
}
);
(Some deref $p:expr, $constructor:ident) => (
{
let found = match ($p).token {
token::Interpolated(token::$constructor(_)) => {
Some(try!(($p).bump_and_get()))
}
_ => None
};
if let Some(token::Interpolated(token::$constructor(x))) = found {
return Ok(Some((*x).clone()));
}
}
);
(pair_empty $p:expr, $constructor:ident) => (
{
let found = match ($p).token {
token::Interpolated(token::$constructor(_)) => {
Some(try!(($p).bump_and_get()))
}
_ => None
};
if let Some(token::Interpolated(token::$constructor(x))) = found {
return Ok((Vec::new(), x));
}
}
)
}
fn maybe_append(mut lhs: Vec<Attribute>, rhs: Option<Vec<Attribute>>)
-> Vec<Attribute> {
if let Some(ref attrs) = rhs {
lhs.extend(attrs.iter().cloned())
}
lhs
}
/* ident is handled by common.rs */
pub struct Parser<'a> {
pub sess: &'a ParseSess,
/// the current token:
pub token: token::Token,
/// the span of the current token:
pub span: Span,
/// the span of the prior token:
pub last_span: Span,
pub cfg: CrateConfig,
/// the previous token or None (only stashed sometimes).
pub last_token: Option<Box<token::Token>>,
pub buffer: [TokenAndSpan; 4],
pub buffer_start: isize,
pub buffer_end: isize,
pub tokens_consumed: usize,
pub restrictions: Restrictions,
pub quote_depth: usize, // not (yet) related to the quasiquoter
pub reader: Box<Reader+'a>,
pub interner: Rc<token::IdentInterner>,
/// The set of seen errors about obsolete syntax. Used to suppress
/// extra detail when the same error is seen twice
pub obsolete_set: HashSet<ObsoleteSyntax>,
/// Used to determine the path to externally loaded source files
pub mod_path_stack: Vec<InternedString>,
/// Stack of spans of open delimiters. Used for error message.
pub open_braces: Vec<Span>,
/// Flag if this parser "owns" the directory that it is currently parsing
/// in. This will affect how nested files are looked up.
pub owns_directory: bool,
/// Name of the root module this parser originated from. If `None`, then the
/// name is not known. This does not change while the parser is descending
/// into modules, and sub-parsers have new values for this name.
pub root_module_name: Option<String>,
pub expected_tokens: Vec<TokenType>,
}
#[derive(PartialEq, Eq, Clone)]
pub enum TokenType {
Token(token::Token),
Keyword(keywords::Keyword),
Operator,
}
impl TokenType {
fn to_string(&self) -> String {
match *self {
TokenType::Token(ref t) => format!("`{}`", Parser::token_to_string(t)),
TokenType::Operator => "an operator".to_string(),
TokenType::Keyword(kw) => format!("`{}`", token::get_name(kw.to_name())),
}
}
}
fn is_plain_ident_or_underscore(t: &token::Token) -> bool {
t.is_plain_ident() || *t == token::Underscore
}
impl<'a> Parser<'a> {
pub fn new(sess: &'a ParseSess,
cfg: ast::CrateConfig,
mut rdr: Box<Reader+'a>)
-> Parser<'a>
{
let tok0 = rdr.real_token();
let span = tok0.sp;
let placeholder = TokenAndSpan {
tok: token::Underscore,
sp: span,
};
Parser {
reader: rdr,
interner: token::get_ident_interner(),
sess: sess,
cfg: cfg,
token: tok0.tok,
span: span,
last_span: span,
last_token: None,
buffer: [
placeholder.clone(),
placeholder.clone(),
placeholder.clone(),
placeholder.clone(),
],
buffer_start: 0,
buffer_end: 0,
tokens_consumed: 0,
restrictions: UNRESTRICTED,
quote_depth: 0,
obsolete_set: HashSet::new(),
mod_path_stack: Vec::new(),
open_braces: Vec::new(),
owns_directory: true,
root_module_name: None,
expected_tokens: Vec::new(),
}
}
    // Panicking fns (for now!)
    // These exist so that the quote_*!() syntax extensions can use the parser
    // without having to handle errors themselves.
pub fn parse_expr(&mut self) -> P<Expr> {
panictry!(self.parse_expr_nopanic())
}
pub fn parse_item(&mut self) -> Option<P<Item>> {
panictry!(self.parse_item_nopanic())
}
pub fn parse_pat(&mut self) -> P<Pat> {
panictry!(self.parse_pat_nopanic())
}
pub fn parse_arm(&mut self) -> Arm {
panictry!(self.parse_arm_nopanic())
}
pub fn parse_ty(&mut self) -> P<Ty> {
panictry!(self.parse_ty_nopanic())
}
pub fn parse_stmt(&mut self) -> Option<P<Stmt>> {
panictry!(self.parse_stmt_nopanic())
}
/// Convert a token to a string using self's reader
pub fn token_to_string(token: &token::Token) -> String {
pprust::token_to_string(token)
}
/// Convert the current token to a string using self's reader
pub fn this_token_to_string(&self) -> String {
Parser::token_to_string(&self.token)
}
pub fn unexpected_last(&self, t: &token::Token) -> FatalError {
let token_str = Parser::token_to_string(t);
let last_span = self.last_span;
self.span_fatal(last_span, &format!("unexpected token: `{}`",
token_str))
}
pub fn unexpected(&mut self) -> FatalError {
match self.expect_one_of(&[], &[]) {
Err(e) => e,
Ok(_) => unreachable!()
}
}
/// Expect and consume the token t. Signal an error if
/// the next token is not t.
pub fn expect(&mut self, t: &token::Token) -> PResult<()> {
if self.expected_tokens.is_empty() {
if self.token == *t {
self.bump()
} else {
let token_str = Parser::token_to_string(t);
let this_token_str = self.this_token_to_string();
Err(self.fatal(&format!("expected `{}`, found `{}`",
token_str,
this_token_str)))
}
} else {
self.expect_one_of(slice::ref_slice(t), &[])
}
}
    /// Expect the next token to be an edible or inedible token. If edible,
    /// then consume it; if inedible, then return without consuming
    /// anything. Signal a fatal error if the next token is unexpected.
pub fn expect_one_of(&mut self,
edible: &[token::Token],
inedible: &[token::Token]) -> PResult<()>{
fn tokens_to_string(tokens: &[TokenType]) -> String {
let mut i = tokens.iter();
// This might be a sign we need a connect method on Iterator.
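            // e.g. two alternatives render as "`a` or `b`", and three or more
            // as "`a`, `b`, or `c`".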
let b = i.next()
.map_or("".to_string(), |t| t.to_string());
i.enumerate().fold(b, |mut b, (i, ref a)| {
if tokens.len() > 2 && i == tokens.len() - 2 {
b.push_str(", or ");
} else if tokens.len() == 2 && i == tokens.len() - 2 {
b.push_str(" or ");
} else {
b.push_str(", ");
}
b.push_str(&*a.to_string());
b
})
}
if edible.contains(&self.token) {
self.bump()
} else if inedible.contains(&self.token) {
// leave it in the input
Ok(())
} else {
let mut expected = edible.iter()
.map(|x| TokenType::Token(x.clone()))
.chain(inedible.iter().map(|x| TokenType::Token(x.clone())))
.chain(self.expected_tokens.iter().cloned())
.collect::<Vec<_>>();
expected.sort_by(|a, b| a.to_string().cmp(&b.to_string()));
expected.dedup();
let expect = tokens_to_string(&expected[..]);
let actual = self.this_token_to_string();
Err(self.fatal(
&(if expected.len() > 1 {
(format!("expected one of {}, found `{}`",
expect,
actual))
} else if expected.is_empty() {
(format!("unexpected token: `{}`",
actual))
} else {
(format!("expected {}, found `{}`",
expect,
actual))
})[..]
))
}
}
/// Check for erroneous `ident { }`; if matches, signal error and
/// recover (without consuming any expected input token). Returns
/// true if and only if input was consumed for recovery.
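    /// e.g. `S { }` where `S` is a unit struct: the `{ }` is consumed, a
    /// non-fatal error is emitted, and `Ok(true)` is returned.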
pub fn check_for_erroneous_unit_struct_expecting(&mut self,
expected: &[token::Token])
-> PResult<bool> {
if self.token == token::OpenDelim(token::Brace)
&& expected.iter().all(|t| *t != token::OpenDelim(token::Brace))
&& self.look_ahead(1, |t| *t == token::CloseDelim(token::Brace)) {
// matched; signal non-fatal error and recover.
let span = self.span;
self.span_err(span,
"unit-like struct construction is written with no trailing `{ }`");
try!(self.eat(&token::OpenDelim(token::Brace)));
try!(self.eat(&token::CloseDelim(token::Brace)));
Ok(true)
} else {
Ok(false)
}
}
/// Commit to parsing a complete expression `e` expected to be
/// followed by some token from the set edible + inedible. Recover
/// from anticipated input errors, discarding erroneous characters.
pub fn commit_expr(&mut self, e: &Expr, edible: &[token::Token],
inedible: &[token::Token]) -> PResult<()> {
debug!("commit_expr {:?}", e);
if let ExprPath(..) = e.node {
            // might be unit-struct construction; check for recoverable input error.
let expected = edible.iter()
.cloned()
.chain(inedible.iter().cloned())
.collect::<Vec<_>>();
try!(self.check_for_erroneous_unit_struct_expecting(&expected[..]));
}
self.expect_one_of(edible, inedible)
}
pub fn commit_expr_expecting(&mut self, e: &Expr, edible: token::Token) -> PResult<()> {
self.commit_expr(e, &[edible], &[])
}
/// Commit to parsing a complete statement `s`, which expects to be
/// followed by some token from the set edible + inedible. Check
/// for recoverable input errors, discarding erroneous characters.
pub fn commit_stmt(&mut self, edible: &[token::Token],
inedible: &[token::Token]) -> PResult<()> {
if self.last_token
.as_ref()
.map_or(false, |t| t.is_ident() || t.is_path()) {
let expected = edible.iter()
.cloned()
.chain(inedible.iter().cloned())
.collect::<Vec<_>>();
try!(self.check_for_erroneous_unit_struct_expecting(&expected));
}
self.expect_one_of(edible, inedible)
}
pub fn commit_stmt_expecting(&mut self, edible: token::Token) -> PResult<()> {
self.commit_stmt(&[edible], &[])
}
pub fn parse_ident(&mut self) -> PResult<ast::Ident> {
self.check_strict_keywords();
try!(self.check_reserved_keywords());
match self.token {
token::Ident(i, _) => {
try!(self.bump());
Ok(i)
}
token::Interpolated(token::NtIdent(..)) => {
self.bug("ident interpolation not converted to real token");
}
_ => {
let token_str = self.this_token_to_string();
Err(self.fatal(&format!("expected ident, found `{}`",
token_str)))
}
}
}
pub fn parse_ident_or_self_type(&mut self) -> PResult<ast::Ident> {
if self.is_self_type_ident() {
self.expect_self_type_ident()
} else {
self.parse_ident()
}
}
pub fn parse_path_list_item(&mut self) -> PResult<ast::PathListItem> {
let lo = self.span.lo;
let node = if try!(self.eat_keyword(keywords::SelfValue)) {
ast::PathListMod { id: ast::DUMMY_NODE_ID }
} else {
let ident = try!(self.parse_ident());
ast::PathListIdent { name: ident, id: ast::DUMMY_NODE_ID }
};
let hi = self.last_span.hi;
Ok(spanned(lo, hi, node))
}
/// Check if the next token is `tok`, and return `true` if so.
///
    /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
/// encountered.
pub fn check(&mut self, tok: &token::Token) -> bool {
let is_present = self.token == *tok;
if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); }
is_present
}
/// Consume token 'tok' if it exists. Returns true if the given
/// token was present, false otherwise.
pub fn eat(&mut self, tok: &token::Token) -> PResult<bool> {
let is_present = self.check(tok);
if is_present { try!(self.bump())}
Ok(is_present)
}
pub fn check_keyword(&mut self, kw: keywords::Keyword) -> bool {
self.expected_tokens.push(TokenType::Keyword(kw));
self.token.is_keyword(kw)
}
/// If the next token is the given keyword, eat it and return
/// true. Otherwise, return false.
pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> PResult<bool> {
if self.check_keyword(kw) {
try!(self.bump());
Ok(true)
} else {
Ok(false)
}
}
pub fn eat_keyword_noexpect(&mut self, kw: keywords::Keyword) -> PResult<bool> {
if self.token.is_keyword(kw) {
try!(self.bump());
Ok(true)
} else {
Ok(false)
}
}
/// If the given word is not a keyword, signal an error.
/// If the next token is not the given word, signal an error.
/// Otherwise, eat it.
pub fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<()> {
if !try!(self.eat_keyword(kw) ){
self.expect_one_of(&[], &[])
} else {
Ok(())
}
}
/// Signal an error if the given string is a strict keyword
pub fn check_strict_keywords(&mut self) {
if self.token.is_strict_keyword() {
let token_str = self.this_token_to_string();
let span = self.span;
self.span_err(span,
&format!("expected identifier, found keyword `{}`",
token_str));
}
}
/// Signal an error if the current token is a reserved keyword
pub fn check_reserved_keywords(&mut self) -> PResult<()>{
if self.token.is_reserved_keyword() {
let token_str = self.this_token_to_string();
Err(self.fatal(&format!("`{}` is a reserved keyword",
token_str)))
} else {
Ok(())
}
}
/// Expect and consume an `&`. If `&&` is seen, replace it with a single
/// `&` and continue. If an `&` is not seen, signal an error.
fn expect_and(&mut self) -> PResult<()> {
self.expected_tokens.push(TokenType::Token(token::BinOp(token::And)));
match self.token {
token::BinOp(token::And) => self.bump(),
token::AndAnd => {
let span = self.span;
let lo = span.lo + BytePos(1);
Ok(self.replace_token(token::BinOp(token::And), lo, span.hi))
}
_ => self.expect_one_of(&[], &[])
}
}
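    /// Signal an error if the literal at `sp` carries a suffix where none is
    /// allowed, e.g. the stray `suffix` in `"text"suffix`; integer and float
    /// literals handle their (valid) suffixes themselves.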
pub fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
match suffix {
None => {/* everything ok */}
Some(suf) => {
let text = suf.as_str();
if text.is_empty() {
self.span_bug(sp, "found empty literal suffix in Some")
}
self.span_err(sp, &*format!("{} with a suffix is illegal", kind));
}
}
}
/// Attempt to consume a `<`. If `<<` is seen, replace it with a single
/// `<` and continue. If a `<` is not seen, return false.
///
/// This is meant to be used when parsing generics on a path to get the
/// starting token.
fn eat_lt(&mut self) -> PResult<bool> {
self.expected_tokens.push(TokenType::Token(token::Lt));
match self.token {
token::Lt => { try!(self.bump()); Ok(true)}
token::BinOp(token::Shl) => {
let span = self.span;
let lo = span.lo + BytePos(1);
self.replace_token(token::Lt, lo, span.hi);
Ok(true)
}
_ => Ok(false),
}
}
fn expect_lt(&mut self) -> PResult<()> {
if !try!(self.eat_lt()) {
self.expect_one_of(&[], &[])
} else {
Ok(())
}
}
    /// Expect and consume a GT. If a `>>` is seen, replace it
/// with a single > and continue. If a GT is not seen,
/// signal an error.
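    /// e.g. when closing the nested generics in `Vec<Vec<u8>>`, the `>>` is
    /// consumed as two `>`s: the first call rewrites the token to a single `>`
    /// and the second consumes it. `>>=` and `>=` are split the same way.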
pub fn expect_gt(&mut self) -> PResult<()> {
self.expected_tokens.push(TokenType::Token(token::Gt));
match self.token {
token::Gt => self.bump(),
token::BinOp(token::Shr) => {
let span = self.span;
let lo = span.lo + BytePos(1);
Ok(self.replace_token(token::Gt, lo, span.hi))
}
token::BinOpEq(token::Shr) => {
let span = self.span;
let lo = span.lo + BytePos(1);
Ok(self.replace_token(token::Ge, lo, span.hi))
}
token::Ge => {
let span = self.span;
let lo = span.lo + BytePos(1);
Ok(self.replace_token(token::Eq, lo, span.hi))
}
_ => {
let gt_str = Parser::token_to_string(&token::Gt);
let this_token_str = self.this_token_to_string();
Err(self.fatal(&format!("expected `{}`, found `{}`",
gt_str,
this_token_str)))
}
}
}
pub fn parse_seq_to_before_gt_or_return<T, F>(&mut self,
sep: Option<token::Token>,
mut f: F)
-> PResult<(OwnedSlice<T>, bool)> where
F: FnMut(&mut Parser) -> PResult<Option<T>>,
{
let mut v = Vec::new();
// This loop works by alternating back and forth between parsing types
// and commas. For example, given a string `A, B,>`, the parser would
// first parse `A`, then a comma, then `B`, then a comma. After that it
// would encounter a `>` and stop. This lets the parser handle trailing
// commas in generic parameters, because it can stop either after
// parsing a type or after parsing a comma.
for i in 0.. {
if self.check(&token::Gt)
|| self.token == token::BinOp(token::Shr)
|| self.token == token::Ge
|| self.token == token::BinOpEq(token::Shr) {
break;
}
if i % 2 == 0 {
match try!(f(self)) {
Some(result) => v.push(result),
None => return Ok((OwnedSlice::from_vec(v), true))
}
} else {
if let Some(t) = sep.as_ref() {
try!(self.expect(t));
}
}
}
return Ok((OwnedSlice::from_vec(v), false));
}
/// Parse a sequence bracketed by '<' and '>', stopping
/// before the '>'.
pub fn parse_seq_to_before_gt<T, F>(&mut self,
sep: Option<token::Token>,
mut f: F)
-> PResult<OwnedSlice<T>> where
F: FnMut(&mut Parser) -> PResult<T>,
{
let (result, returned) = try!(self.parse_seq_to_before_gt_or_return(sep,
|p| Ok(Some(try!(f(p))))));
assert!(!returned);
return Ok(result);
}
pub fn parse_seq_to_gt<T, F>(&mut self,
sep: Option<token::Token>,
f: F)
-> PResult<OwnedSlice<T>> where
F: FnMut(&mut Parser) -> PResult<T>,
{
let v = try!(self.parse_seq_to_before_gt(sep, f));
try!(self.expect_gt());
return Ok(v);
}
pub fn parse_seq_to_gt_or_return<T, F>(&mut self,
sep: Option<token::Token>,
f: F)
-> PResult<(OwnedSlice<T>, bool)> where
F: FnMut(&mut Parser) -> PResult<Option<T>>,
{
let (v, returned) = try!(self.parse_seq_to_before_gt_or_return(sep, f));
if !returned {
try!(self.expect_gt());
}
return Ok((v, returned));
}
/// Parse a sequence, including the closing delimiter. The function
/// f must consume tokens until reaching the next separator or
/// closing bracket.
pub fn parse_seq_to_end<T, F>(&mut self,
ket: &token::Token,
sep: SeqSep,
f: F)
-> PResult<Vec<T>> where
F: FnMut(&mut Parser) -> PResult<T>,
{
let val = try!(self.parse_seq_to_before_end(ket, sep, f));
try!(self.bump());
Ok(val)
}
/// Parse a sequence, not including the closing delimiter. The function
/// f must consume tokens until reaching the next separator or
/// closing bracket.
pub fn parse_seq_to_before_end<T, F>(&mut self,
ket: &token::Token,
sep: SeqSep,
mut f: F)
-> PResult<Vec<T>> where
F: FnMut(&mut Parser) -> PResult<T>,
{
let mut first: bool = true;
let mut v = vec!();
while self.token != *ket {
match sep.sep {
Some(ref t) => {
if first { first = false; }
else { try!(self.expect(t)); }
}
_ => ()
}
if sep.trailing_sep_allowed && self.check(ket) { break; }
v.push(try!(f(self)));
}
return Ok(v);
}
/// Parse a sequence, including the closing delimiter. The function
/// f must consume tokens until reaching the next separator or
/// closing bracket.
pub fn parse_unspanned_seq<T, F>(&mut self,
bra: &token::Token,
ket: &token::Token,
sep: SeqSep,
f: F)
-> PResult<Vec<T>> where
F: FnMut(&mut Parser) -> PResult<T>,
{
try!(self.expect(bra));
let result = try!(self.parse_seq_to_before_end(ket, sep, f));
try!(self.bump());
Ok(result)
}
/// Parse a sequence parameter of enum variant. For consistency purposes,
/// these should not be empty.
pub fn parse_enum_variant_seq<T, F>(&mut self,
bra: &token::Token,
ket: &token::Token,
sep: SeqSep,
f: F)
-> PResult<Vec<T>> where
F: FnMut(&mut Parser) -> PResult<T>,
{
let result = try!(self.parse_unspanned_seq(bra, ket, sep, f));
if result.is_empty() {
let last_span = self.last_span;
self.span_err(last_span,
"nullary enum variants are written with no trailing `( )`");
}
Ok(result)
}
// NB: Do not use this function unless you actually plan to place the
// spanned list in the AST.
pub fn parse_seq<T, F>(&mut self,
bra: &token::Token,
ket: &token::Token,
sep: SeqSep,
f: F)
-> PResult<Spanned<Vec<T>>> where
F: FnMut(&mut Parser) -> PResult<T>,
{
let lo = self.span.lo;
try!(self.expect(bra));
let result = try!(self.parse_seq_to_before_end(ket, sep, f));
let hi = self.span.hi;
try!(self.bump());
Ok(spanned(lo, hi, result))
}
/// Advance the parser by one token
pub fn bump(&mut self) -> PResult<()> {
self.last_span = self.span;
// Stash token for error recovery (sometimes; clone is not necessarily cheap).
self.last_token = if self.token.is_ident() ||
self.token.is_path() ||
self.token == token::Comma {
Some(Box::new(self.token.clone()))
} else {
None
};
let next = if self.buffer_start == self.buffer_end {
self.reader.real_token()
} else {
// Avoid token copies with `replace`.
let buffer_start = self.buffer_start as usize;
let next_index = (buffer_start + 1) & 3;
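            // `& 3` wraps the index around the fixed 4-slot ring buffer.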
self.buffer_start = next_index as isize;
let placeholder = TokenAndSpan {
tok: token::Underscore,
sp: self.span,
};
mem::replace(&mut self.buffer[buffer_start], placeholder)
};
self.span = next.sp;
self.token = next.tok;
self.tokens_consumed += 1;
self.expected_tokens.clear();
// check after each token
self.check_unknown_macro_variable()
}
/// Advance the parser by one token and return the bumped token.
pub fn bump_and_get(&mut self) -> PResult<token::Token> {
let old_token = mem::replace(&mut self.token, token::Underscore);
try!(self.bump());
Ok(old_token)
}
/// EFFECT: replace the current token and span with the given one
pub fn replace_token(&mut self,
next: token::Token,
lo: BytePos,
hi: BytePos) {
self.last_span = mk_sp(self.span.lo, lo);
self.token = next;
self.span = mk_sp(lo, hi);
}
pub fn buffer_length(&mut self) -> isize {
if self.buffer_start <= self.buffer_end {
return self.buffer_end - self.buffer_start;
}
return (4 - self.buffer_start) + self.buffer_end;
}
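    /// Apply `f` to the token `distance` tokens ahead of the current one,
    /// buffering tokens from the reader as needed. Note the buffer holds only
    /// four tokens, so callers keep `distance` small.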
pub fn look_ahead<R, F>(&mut self, distance: usize, f: F) -> R where
F: FnOnce(&token::Token) -> R,
{
let dist = distance as isize;
while self.buffer_length() < dist {
self.buffer[self.buffer_end as usize] = self.reader.real_token();
self.buffer_end = (self.buffer_end + 1) & 3;
}
f(&self.buffer[((self.buffer_start + dist - 1) & 3) as usize].tok)
}
pub fn fatal(&self, m: &str) -> diagnostic::FatalError {
self.sess.span_diagnostic.span_fatal(self.span, m)
}
pub fn span_fatal(&self, sp: Span, m: &str) -> diagnostic::FatalError {
self.sess.span_diagnostic.span_fatal(sp, m)
}
pub fn span_fatal_help(&self, sp: Span, m: &str, help: &str) -> diagnostic::FatalError {
self.span_err(sp, m);
self.fileline_help(sp, help);
diagnostic::FatalError
}
pub fn span_note(&self, sp: Span, m: &str) {
self.sess.span_diagnostic.span_note(sp, m)
}
pub fn span_help(&self, sp: Span, m: &str) {
self.sess.span_diagnostic.span_help(sp, m)
}
pub fn span_suggestion(&self, sp: Span, m: &str, n: String) {
self.sess.span_diagnostic.span_suggestion(sp, m, n)
}
pub fn fileline_help(&self, sp: Span, m: &str) {
self.sess.span_diagnostic.fileline_help(sp, m)
}
pub fn bug(&self, m: &str) -> ! {
self.sess.span_diagnostic.span_bug(self.span, m)
}
pub fn warn(&self, m: &str) {
self.sess.span_diagnostic.span_warn(self.span, m)
}
pub fn span_warn(&self, sp: Span, m: &str) {
self.sess.span_diagnostic.span_warn(sp, m)
}
pub fn span_err(&self, sp: Span, m: &str) {
self.sess.span_diagnostic.span_err(sp, m)
}
pub fn span_bug(&self, sp: Span, m: &str) -> ! {
self.sess.span_diagnostic.span_bug(sp, m)
}
pub fn abort_if_errors(&self) {
self.sess.span_diagnostic.handler().abort_if_errors();
}
pub fn id_to_interned_str(&mut self, id: Ident) -> InternedString {
token::get_ident(id)
}
/// Is the current token one of the keywords that signals a bare function
/// type?
pub fn token_is_bare_fn_keyword(&mut self) -> bool {
self.check_keyword(keywords::Fn) ||
self.check_keyword(keywords::Unsafe) ||
self.check_keyword(keywords::Extern)
}
pub fn get_lifetime(&mut self) -> ast::Ident {
match self.token {
token::Lifetime(ref ident) => *ident,
_ => self.bug("not a lifetime"),
}
}
pub fn parse_for_in_type(&mut self) -> PResult<Ty_> {
/*
Parses whatever can come after a `for` keyword in a type.
The `for` has already been consumed.
Deprecated:
- for <'lt> |S| -> T
Eventually:
- for <'lt> [unsafe] [extern "ABI"] fn (S) -> T
- for <'lt> path::foo(a, b)
*/
// parse <'lt>
let lo = self.span.lo;
let lifetime_defs = try!(self.parse_late_bound_lifetime_defs());
        // examine the next token to decide what to do
if self.token_is_bare_fn_keyword() {
self.parse_ty_bare_fn(lifetime_defs)
} else {
let hi = self.span.hi;
let trait_ref = try!(self.parse_trait_ref());
let poly_trait_ref = ast::PolyTraitRef { bound_lifetimes: lifetime_defs,
trait_ref: trait_ref,
span: mk_sp(lo, hi)};
let other_bounds = if try!(self.eat(&token::BinOp(token::Plus)) ){
try!(self.parse_ty_param_bounds(BoundParsingMode::Bare))
} else {
OwnedSlice::empty()
};
let all_bounds =
Some(TraitTyParamBound(poly_trait_ref, TraitBoundModifier::None)).into_iter()
.chain(other_bounds.into_vec().into_iter())
.collect();
Ok(ast::TyPolyTraitRef(all_bounds))
}
}
pub fn parse_ty_path(&mut self) -> PResult<Ty_> {
Ok(TyPath(None, try!(self.parse_path(LifetimeAndTypesWithoutColons))))
}
/// parse a TyBareFn type:
pub fn parse_ty_bare_fn(&mut self, lifetime_defs: Vec<ast::LifetimeDef>) -> PResult<Ty_> {
/*
[unsafe] [extern "ABI"] fn <'lt> (S) -> T
^~~~^ ^~~~^ ^~~~^ ^~^ ^
| | | | |
| | | | Return type
| | | Argument types
| | Lifetimes
| ABI
Function Style
*/
let unsafety = try!(self.parse_unsafety());
let abi = if try!(self.eat_keyword(keywords::Extern) ){
try!(self.parse_opt_abi()).unwrap_or(abi::C)
} else {
abi::Rust
};
try!(self.expect_keyword(keywords::Fn));
let (inputs, variadic) = try!(self.parse_fn_args(false, true));
let ret_ty = try!(self.parse_ret_ty());
let decl = P(FnDecl {
inputs: inputs,
output: ret_ty,
variadic: variadic
});
Ok(TyBareFn(P(BareFnTy {
abi: abi,
unsafety: unsafety,
lifetimes: lifetime_defs,
decl: decl
})))
}
/// Parses an obsolete closure kind (`&:`, `&mut:`, or `:`).
pub fn parse_obsolete_closure_kind(&mut self) -> PResult<()> {
let lo = self.span.lo;
if
self.check(&token::BinOp(token::And)) &&
self.look_ahead(1, |t| t.is_keyword(keywords::Mut)) &&
self.look_ahead(2, |t| *t == token::Colon)
{
try!(self.bump());
try!(self.bump());
try!(self.bump());
} else if
self.token == token::BinOp(token::And) &&
self.look_ahead(1, |t| *t == token::Colon)
{
try!(self.bump());
try!(self.bump());
} else if
try!(self.eat(&token::Colon))
{
/* nothing */
} else {
return Ok(());
}
let span = mk_sp(lo, self.span.hi);
self.obsolete(span, ObsoleteSyntax::ClosureKind);
Ok(())
}
pub fn parse_unsafety(&mut self) -> PResult<Unsafety> {
if try!(self.eat_keyword(keywords::Unsafe)) {
return Ok(Unsafety::Unsafe);
} else {
return Ok(Unsafety::Normal);
}
}
/// Parse the items in a trait declaration
pub fn parse_trait_items(&mut self) -> PResult<Vec<P<TraitItem>>> {
self.parse_unspanned_seq(
&token::OpenDelim(token::Brace),
&token::CloseDelim(token::Brace),
seq_sep_none(),
|p| -> PResult<P<TraitItem>> {
maybe_whole!(no_clone p, NtTraitItem);
let mut attrs = p.parse_outer_attributes();
let lo = p.span.lo;
let (name, node) = if try!(p.eat_keyword(keywords::Type)) {
let TyParam {ident, bounds, default, ..} = try!(p.parse_ty_param());
try!(p.expect(&token::Semi));
(ident, TypeTraitItem(bounds, default))
} else if try!(p.eat_keyword(keywords::Const)) {
let ident = try!(p.parse_ident());
try!(p.expect(&token::Colon));
let ty = try!(p.parse_ty_sum());
let default = if p.check(&token::Eq) {
try!(p.bump());
let expr = try!(p.parse_expr_nopanic());
try!(p.commit_expr_expecting(&expr, token::Semi));
Some(expr)
} else {
try!(p.expect(&token::Semi));
None
};
(ident, ConstTraitItem(ty, default))
} else {
let style = try!(p.parse_unsafety());
let abi = if try!(p.eat_keyword(keywords::Extern)) {
try!(p.parse_opt_abi()).unwrap_or(abi::C)
} else {
abi::Rust
};
try!(p.expect_keyword(keywords::Fn));
let ident = try!(p.parse_ident());
let mut generics = try!(p.parse_generics());
let (explicit_self, d) = try!(p.parse_fn_decl_with_self(|p|{
                        // This is somewhat dubious; we don't want to allow
// argument names to be left off if there is a
// definition...
p.parse_arg_general(false)
}));
generics.where_clause = try!(p.parse_where_clause());
let sig = ast::MethodSig {
unsafety: style,
decl: d,
generics: generics,
abi: abi,
explicit_self: explicit_self,
};
let body = match p.token {
token::Semi => {
try!(p.bump());
debug!("parse_trait_methods(): parsing required method");
None
}
token::OpenDelim(token::Brace) => {
debug!("parse_trait_methods(): parsing provided method");
let (inner_attrs, body) =
try!(p.parse_inner_attrs_and_block());
attrs.extend(inner_attrs.iter().cloned());
Some(body)
}
_ => {
let token_str = p.this_token_to_string();
return Err(p.fatal(&format!("expected `;` or `{{`, found `{}`",
token_str)[..]))
}
};
(ident, ast::MethodTraitItem(sig, body))
};
Ok(P(TraitItem {
id: ast::DUMMY_NODE_ID,
ident: name,
attrs: attrs,
node: node,
span: mk_sp(lo, p.last_span.hi),
}))
})
}
/// Parse a possibly mutable type
pub fn parse_mt(&mut self) -> PResult<MutTy> {
let mutbl = try!(self.parse_mutability());
let t = try!(self.parse_ty_nopanic());
Ok(MutTy { ty: t, mutbl: mutbl })
}
/// Parse optional return type [ -> TY ] in function decl
pub fn parse_ret_ty(&mut self) -> PResult<FunctionRetTy> {
if try!(self.eat(&token::RArrow) ){
if try!(self.eat(&token::Not) ){
Ok(NoReturn(self.span))
} else {
Ok(Return(try!(self.parse_ty_nopanic())))
}
} else {
let pos = self.span.lo;
Ok(DefaultReturn(mk_sp(pos, pos)))
}
}
/// Parse a type in a context where `T1+T2` is allowed.
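    /// e.g. `Box<Writer + Send>`: `Writer` is parsed as the type and `Send` as
    /// a bound, and the two are combined into a `TyObjectSum`.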
pub fn parse_ty_sum(&mut self) -> PResult<P<Ty>> {
let lo = self.span.lo;
let lhs = try!(self.parse_ty_nopanic());
if !try!(self.eat(&token::BinOp(token::Plus)) ){
return Ok(lhs);
}
let bounds = try!(self.parse_ty_param_bounds(BoundParsingMode::Bare));
        // In the type grammar, `+` is treated like a binary operator,
        // and hence both the L and R sides are required.
if bounds.is_empty() {
let last_span = self.last_span;
self.span_err(last_span,
"at least one type parameter bound \
must be specified");
}
let sp = mk_sp(lo, self.last_span.hi);
let sum = ast::TyObjectSum(lhs, bounds);
Ok(P(Ty {id: ast::DUMMY_NODE_ID, node: sum, span: sp}))
}
/// Parse a type.
pub fn parse_ty_nopanic(&mut self) -> PResult<P<Ty>> {
maybe_whole!(no_clone self, NtTy);
let lo = self.span.lo;
let t = if self.check(&token::OpenDelim(token::Paren)) {
try!(self.bump());
// (t) is a parenthesized ty
// (t,) is the type of a tuple with only one field,
// of type t
let mut ts = vec![];
let mut last_comma = false;
while self.token != token::CloseDelim(token::Paren) {
ts.push(try!(self.parse_ty_sum()));
if self.check(&token::Comma) {
last_comma = true;
try!(self.bump());
} else {
last_comma = false;
break;
}
}
try!(self.expect(&token::CloseDelim(token::Paren)));
if ts.len() == 1 && !last_comma {
TyParen(ts.into_iter().nth(0).unwrap())
} else {
TyTup(ts)
}
} else if self.check(&token::BinOp(token::Star)) {
// STAR POINTER (bare pointer?)
try!(self.bump());
TyPtr(try!(self.parse_ptr()))
} else if self.check(&token::OpenDelim(token::Bracket)) {
// VECTOR
try!(self.expect(&token::OpenDelim(token::Bracket)));
let t = try!(self.parse_ty_sum());
// Parse the `; e` in `[ i32; e ]`
// where `e` is a const expression
let t = match try!(self.maybe_parse_fixed_length_of_vec()) {
None => TyVec(t),
Some(suffix) => TyFixedLengthVec(t, suffix)
};
try!(self.expect(&token::CloseDelim(token::Bracket)));
t
} else if self.check(&token::BinOp(token::And)) ||
self.token == token::AndAnd {
// BORROWED POINTER
try!(self.expect_and());
try!(self.parse_borrowed_pointee())
} else if self.check_keyword(keywords::For) {
try!(self.parse_for_in_type())
} else if self.token_is_bare_fn_keyword() {
// BARE FUNCTION
try!(self.parse_ty_bare_fn(Vec::new()))
} else if try!(self.eat_keyword_noexpect(keywords::Typeof)) {
// TYPEOF
// In order to not be ambiguous, the type must be surrounded by parens.
try!(self.expect(&token::OpenDelim(token::Paren)));
let e = try!(self.parse_expr_nopanic());
try!(self.expect(&token::CloseDelim(token::Paren)));
TyTypeof(e)
} else if try!(self.eat_lt()) {
let (qself, path) =
try!(self.parse_qualified_path(QPathParsingMode::NoParameters));
TyPath(Some(qself), path)
} else if self.check(&token::ModSep) ||
self.token.is_ident() ||
self.token.is_path() {
// NAMED TYPE
try!(self.parse_ty_path())
} else if try!(self.eat(&token::Underscore) ){
// TYPE TO BE INFERRED
TyInfer
} else {
let this_token_str = self.this_token_to_string();
let msg = format!("expected type, found `{}`", this_token_str);
return Err(self.fatal(&msg[..]));
};
let sp = mk_sp(lo, self.last_span.hi);
Ok(P(Ty {id: ast::DUMMY_NODE_ID, node: t, span: sp}))
}
pub fn parse_borrowed_pointee(&mut self) -> PResult<Ty_> {
// look for `&'lt` or `&'foo ` and interpret `foo` as the region name:
let opt_lifetime = try!(self.parse_opt_lifetime());
let mt = try!(self.parse_mt());
return Ok(TyRptr(opt_lifetime, mt));
}
pub fn parse_ptr(&mut self) -> PResult<MutTy> {
let mutbl = if try!(self.eat_keyword(keywords::Mut) ){
MutMutable
} else if try!(self.eat_keyword(keywords::Const) ){
MutImmutable
} else {
let span = self.last_span;
self.span_err(span,
"bare raw pointers are no longer allowed, you should \
likely use `*mut T`, but otherwise `*T` is now \
known as `*const T`");
MutImmutable
};
let t = try!(self.parse_ty_nopanic());
Ok(MutTy { ty: t, mutbl: mutbl })
}
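    /// Look ahead to decide whether the upcoming tokens are a named argument,
    /// i.e. a pattern followed by `:` (skipping over a leading `&`, `&&`, or
    /// `mut`), e.g. `x: i32` or `mut x: i32`, as opposed to a bare type.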
pub fn is_named_argument(&mut self) -> bool {
let offset = match self.token {
token::BinOp(token::And) => 1,
token::AndAnd => 1,
_ if self.token.is_keyword(keywords::Mut) => 1,
_ => 0
};
debug!("parser is_named_argument offset:{}", offset);
if offset == 0 {
is_plain_ident_or_underscore(&self.token)
&& self.look_ahead(1, |t| *t == token::Colon)
} else {
self.look_ahead(offset, |t| is_plain_ident_or_underscore(t))
&& self.look_ahead(offset + 1, |t| *t == token::Colon)
}
}
/// This version of parse arg doesn't necessarily require
/// identifier names.
pub fn parse_arg_general(&mut self, require_name: bool) -> PResult<Arg> {
let pat = if require_name || self.is_named_argument() {
debug!("parse_arg_general parse_pat (require_name:{})",
require_name);
let pat = try!(self.parse_pat_nopanic());
try!(self.expect(&token::Colon));
pat
} else {
debug!("parse_arg_general ident_to_pat");
ast_util::ident_to_pat(ast::DUMMY_NODE_ID,
self.last_span,
special_idents::invalid)
};
let t = try!(self.parse_ty_sum());
Ok(Arg {
ty: t,
pat: pat,
id: ast::DUMMY_NODE_ID,
})
}
/// Parse a single function argument
pub fn parse_arg(&mut self) -> PResult<Arg> {
self.parse_arg_general(true)
}
/// Parse an argument in a lambda header e.g. |arg, arg|
pub fn parse_fn_block_arg(&mut self) -> PResult<Arg> {
let pat = try!(self.parse_pat_nopanic());
let t = if try!(self.eat(&token::Colon) ){
try!(self.parse_ty_sum())
} else {
P(Ty {
id: ast::DUMMY_NODE_ID,
node: TyInfer,
span: mk_sp(self.span.lo, self.span.hi),
})
};
Ok(Arg {
ty: t,
pat: pat,
id: ast::DUMMY_NODE_ID
})
}
pub fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<Option<P<ast::Expr>>> {
if self.check(&token::Semi) {
try!(self.bump());
Ok(Some(try!(self.parse_expr_nopanic())))
} else {
Ok(None)
}
}
/// Matches token_lit = LIT_INTEGER | ...
pub fn lit_from_token(&self, tok: &token::Token) -> PResult<Lit_> {
match *tok {
token::Interpolated(token::NtExpr(ref v)) => {
match v.node {
ExprLit(ref lit) => { Ok(lit.node.clone()) }
_ => { return Err(self.unexpected_last(tok)); }
}
}
token::Literal(lit, suf) => {
let (suffix_illegal, out) = match lit {
token::Byte(i) => (true, LitByte(parse::byte_lit(i.as_str()).0)),
token::Char(i) => (true, LitChar(parse::char_lit(i.as_str()).0)),
// there are some valid suffixes for integer and
// float literals, so all the handling is done
// internally.
token::Integer(s) => {
(false, parse::integer_lit(s.as_str(),
suf.as_ref().map(|s| s.as_str()),
&self.sess.span_diagnostic,
self.last_span))
}
token::Float(s) => {
(false, parse::float_lit(s.as_str(),
suf.as_ref().map(|s| s.as_str()),
&self.sess.span_diagnostic,
self.last_span))
}
token::Str_(s) => {
(true,
LitStr(token::intern_and_get_ident(&parse::str_lit(s.as_str())),
ast::CookedStr))
}
token::StrRaw(s, n) => {
(true,
LitStr(
token::intern_and_get_ident(&parse::raw_str_lit(s.as_str())),
ast::RawStr(n)))
}
token::Binary(i) =>
(true, LitBinary(parse::binary_lit(i.as_str()))),
token::BinaryRaw(i, _) =>
(true,
LitBinary(Rc::new(i.as_str().as_bytes().iter().cloned().collect()))),
};
if suffix_illegal {
let sp = self.last_span;
self.expect_no_suffix(sp, &*format!("{} literal", lit.short_name()), suf)
}
Ok(out)
}
_ => { return Err(self.unexpected_last(tok)); }
}
}
/// Matches lit = true | false | token_lit
pub fn parse_lit(&mut self) -> PResult<Lit> {
let lo = self.span.lo;
let lit = if try!(self.eat_keyword(keywords::True) ){
LitBool(true)
} else if try!(self.eat_keyword(keywords::False) ){
LitBool(false)
} else {
let token = try!(self.bump_and_get());
let lit = try!(self.lit_from_token(&token));
lit
};
Ok(codemap::Spanned { node: lit, span: mk_sp(lo, self.last_span.hi) })
}
/// matches '-' lit | lit
pub fn parse_literal_maybe_minus(&mut self) -> PResult<P<Expr>> {
let minus_lo = self.span.lo;
let minus_present = try!(self.eat(&token::BinOp(token::Minus)));
let lo = self.span.lo;
let literal = P(try!(self.parse_lit()));
let hi = self.span.hi;
let expr = self.mk_expr(lo, hi, ExprLit(literal));
if minus_present {
let minus_hi = self.span.hi;
let unary = self.mk_unary(UnNeg, expr);
Ok(self.mk_expr(minus_lo, minus_hi, unary))
} else {
Ok(expr)
}
}
// QUALIFIED PATH `<TYPE [as TRAIT_REF]>::IDENT[::<PARAMS>]`
// Assumes that the leading `<` has been parsed already.
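    // e.g. `<Vec<T> as SomeTrait>::SomeItem` produces a `QSelf` whose
    // `position` is the number of segments contributed by the trait path
    // (1 here, for `SomeTrait`); without an `as Trait`, the position is 0.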
pub fn parse_qualified_path(&mut self, mode: QPathParsingMode)
-> PResult<(QSelf, ast::Path)> {
let self_type = try!(self.parse_ty_sum());
let mut path = if try!(self.eat_keyword(keywords::As)) {
try!(self.parse_path(LifetimeAndTypesWithoutColons))
} else {
ast::Path {
span: self.span,
global: false,
segments: vec![]
}
};
let qself = QSelf {
ty: self_type,
position: path.segments.len()
};
try!(self.expect(&token::Gt));
try!(self.expect(&token::ModSep));
let item_name = try!(self.parse_ident());
let parameters = match mode {
QPathParsingMode::NoParameters => ast::PathParameters::none(),
QPathParsingMode::MaybeParameters => {
if try!(self.eat(&token::ModSep)) {
try!(self.expect_lt());
// Consumed `item::<`, go look for types
let (lifetimes, types, bindings) =
try!(self.parse_generic_values_after_lt());
ast::AngleBracketedParameters(ast::AngleBracketedParameterData {
lifetimes: lifetimes,
types: OwnedSlice::from_vec(types),
bindings: OwnedSlice::from_vec(bindings),
})
} else {
ast::PathParameters::none()
}
}
};
path.segments.push(ast::PathSegment {
identifier: item_name,
parameters: parameters
});
if path.segments.len() == 1 {
path.span.lo = self.last_span.lo;
}
path.span.hi = self.last_span.hi;
Ok((qself, path))
}
/// Parses a path and optional type parameter bounds, depending on the
/// mode. The `mode` parameter determines whether lifetimes, types, and/or
/// bounds are permitted and whether `::` must precede type parameter
/// groups.
pub fn parse_path(&mut self, mode: PathParsingMode) -> PResult<ast::Path> {
// Check for a whole path...
let found = match self.token {
token::Interpolated(token::NtPath(_)) => Some(try!(self.bump_and_get())),
_ => None,
};
if let Some(token::Interpolated(token::NtPath(path))) = found {
return Ok(*path);
}
let lo = self.span.lo;
let is_global = try!(self.eat(&token::ModSep));
// Parse any number of segments and bound sets. A segment is an
// identifier followed by an optional lifetime and a set of types.
// A bound set is a set of type parameter bounds.
let segments = match mode {
LifetimeAndTypesWithoutColons => {
try!(self.parse_path_segments_without_colons())
}
LifetimeAndTypesWithColons => {
try!(self.parse_path_segments_with_colons())
}
NoTypesAllowed => {
try!(self.parse_path_segments_without_types())
}
};
// Assemble the span.
let span = mk_sp(lo, self.last_span.hi);
// Assemble the result.
Ok(ast::Path {
span: span,
global: is_global,
segments: segments,
})
}
/// Examples:
/// - `a::b<T,U>::c<V,W>`
/// - `a::b<T,U>::c(V) -> W`
/// - `a::b<T,U>::c(V)`
pub fn parse_path_segments_without_colons(&mut self) -> PResult<Vec<ast::PathSegment>> {
let mut segments = Vec::new();
loop {
// First, parse an identifier.
let identifier = try!(self.parse_ident_or_self_type());
// Parse types, optionally.
let parameters = if try!(self.eat_lt() ){
let (lifetimes, types, bindings) = try!(self.parse_generic_values_after_lt());
ast::AngleBracketedParameters(ast::AngleBracketedParameterData {
lifetimes: lifetimes,
types: OwnedSlice::from_vec(types),
bindings: OwnedSlice::from_vec(bindings),
})
} else if try!(self.eat(&token::OpenDelim(token::Paren)) ){
let lo = self.last_span.lo;
let inputs = try!(self.parse_seq_to_end(
&token::CloseDelim(token::Paren),
seq_sep_trailing_allowed(token::Comma),
|p| p.parse_ty_sum()));
let output_ty = if try!(self.eat(&token::RArrow) ){
Some(try!(self.parse_ty_nopanic()))
} else {
None
};
let hi = self.last_span.hi;
ast::ParenthesizedParameters(ast::ParenthesizedParameterData {
span: mk_sp(lo, hi),
inputs: inputs,
output: output_ty,
})
} else {
ast::PathParameters::none()
};
// Assemble and push the result.
segments.push(ast::PathSegment { identifier: identifier,
parameters: parameters });
// Continue only if we see a `::`
if !try!(self.eat(&token::ModSep) ){
return Ok(segments);
}
}
}
/// Examples:
/// - `a::b::<T,U>::c`
pub fn parse_path_segments_with_colons(&mut self) -> PResult<Vec<ast::PathSegment>> {
let mut segments = Vec::new();
loop {
// First, parse an identifier.
let identifier = try!(self.parse_ident_or_self_type());
// If we do not see a `::`, stop.
if !try!(self.eat(&token::ModSep) ){
segments.push(ast::PathSegment {
identifier: identifier,
parameters: ast::PathParameters::none()
});
return Ok(segments);
}
// Check for a type segment.
if try!(self.eat_lt() ){
// Consumed `a::b::<`, go look for types
let (lifetimes, types, bindings) = try!(self.parse_generic_values_after_lt());
segments.push(ast::PathSegment {
identifier: identifier,
parameters: ast::AngleBracketedParameters(ast::AngleBracketedParameterData {
lifetimes: lifetimes,
types: OwnedSlice::from_vec(types),
bindings: OwnedSlice::from_vec(bindings),
}),
});
// Consumed `a::b::<T,U>`, check for `::` before proceeding
if !try!(self.eat(&token::ModSep) ){
return Ok(segments);
}
} else {
// Consumed `a::`, go look for `b`
segments.push(ast::PathSegment {
identifier: identifier,
parameters: ast::PathParameters::none(),
});
}
}
}
/// Examples:
/// - `a::b::c`
pub fn parse_path_segments_without_types(&mut self) -> PResult<Vec<ast::PathSegment>> {
let mut segments = Vec::new();
loop {
// First, parse an identifier.
let identifier = try!(self.parse_ident_or_self_type());
// Assemble and push the result.
segments.push(ast::PathSegment {
identifier: identifier,
parameters: ast::PathParameters::none()
});
// If we do not see a `::`, stop.
if !try!(self.eat(&token::ModSep) ){
return Ok(segments);
}
}
}
/// parses 0 or 1 lifetime
pub fn parse_opt_lifetime(&mut self) -> PResult<Option<ast::Lifetime>> {
match self.token {
token::Lifetime(..) => {
Ok(Some(try!(self.parse_lifetime())))
}
_ => {
Ok(None)
}
}
}
/// Parses a single lifetime
/// Matches lifetime = LIFETIME
pub fn parse_lifetime(&mut self) -> PResult<ast::Lifetime> {
match self.token {
token::Lifetime(i) => {
let span = self.span;
try!(self.bump());
return Ok(ast::Lifetime {
id: ast::DUMMY_NODE_ID,
span: span,
name: i.name
});
}
_ => {
return Err(self.fatal(&format!("expected a lifetime name")));
}
}
}
    /// Parses `lifetime_defs = [ lifetime_def { ',' lifetime_def } ]` where `lifetime_def =
    /// lifetime [':' lifetimes]`
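    /// e.g. `'a, 'b: 'a` yields two defs, the second bounded by `'a`.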
pub fn parse_lifetime_defs(&mut self) -> PResult<Vec<ast::LifetimeDef>> {
let mut res = Vec::new();
loop {
match self.token {
token::Lifetime(_) => {
let lifetime = try!(self.parse_lifetime());
let bounds =
if try!(self.eat(&token::Colon) ){
try!(self.parse_lifetimes(token::BinOp(token::Plus)))
} else {
Vec::new()
};
res.push(ast::LifetimeDef { lifetime: lifetime,
bounds: bounds });
}
_ => {
return Ok(res);
}
}
match self.token {
token::Comma => { try!(self.bump());}
token::Gt => { return Ok(res); }
token::BinOp(token::Shr) => { return Ok(res); }
_ => {
let this_token_str = self.this_token_to_string();
let msg = format!("expected `,` or `>` after lifetime \
name, found `{}`",
this_token_str);
return Err(self.fatal(&msg[..]));
}
}
}
}
    /// Matches `lifetimes = ( lifetime ) | ( lifetime , lifetimes )`. It actually matches the
    /// empty list too, but putting that in there messes up the grammar....
///
/// Parses zero or more comma separated lifetimes. Expects each lifetime to be followed by
/// either a comma or `>`. Used when parsing type parameter lists, where we expect something
/// like `<'a, 'b, T>`.
pub fn parse_lifetimes(&mut self, sep: token::Token) -> PResult<Vec<ast::Lifetime>> {
let mut res = Vec::new();
loop {
match self.token {
token::Lifetime(_) => {
res.push(try!(self.parse_lifetime()));
}
_ => {
return Ok(res);
}
}
if self.token != sep {
return Ok(res);
}
try!(self.bump());
}
}
/// Parse mutability declaration (mut/const/imm)
pub fn parse_mutability(&mut self) -> PResult<Mutability> {
if try!(self.eat_keyword(keywords::Mut) ){
Ok(MutMutable)
} else {
Ok(MutImmutable)
}
}
/// Parse ident COLON expr
pub fn parse_field(&mut self) -> PResult<Field> {
let lo = self.span.lo;
let i = try!(self.parse_ident());
let hi = self.last_span.hi;
try!(self.expect(&token::Colon));
let e = try!(self.parse_expr_nopanic());
Ok(ast::Field {
ident: spanned(lo, hi, i),
span: mk_sp(lo, e.span.hi),
expr: e,
})
}
pub fn mk_expr(&mut self, lo: BytePos, hi: BytePos, node: Expr_) -> P<Expr> {
P(Expr {
id: ast::DUMMY_NODE_ID,
node: node,
span: mk_sp(lo, hi),
})
}
pub fn mk_unary(&mut self, unop: ast::UnOp, expr: P<Expr>) -> ast::Expr_ {
ExprUnary(unop, expr)
}
pub fn mk_binary(&mut self, binop: ast::BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ast::Expr_ {
ExprBinary(binop, lhs, rhs)
}
pub fn mk_call(&mut self, f: P<Expr>, args: Vec<P<Expr>>) -> ast::Expr_ {
ExprCall(f, args)
}
fn mk_method_call(&mut self,
ident: ast::SpannedIdent,
tps: Vec<P<Ty>>,
args: Vec<P<Expr>>)
-> ast::Expr_ {
ExprMethodCall(ident, tps, args)
}
pub fn mk_index(&mut self, expr: P<Expr>, idx: P<Expr>) -> ast::Expr_ {
ExprIndex(expr, idx)
}
pub fn mk_range(&mut self,
start: Option<P<Expr>>,
end: Option<P<Expr>>)
-> ast::Expr_ {
ExprRange(start, end)
}
pub fn mk_field(&mut self, expr: P<Expr>, ident: ast::SpannedIdent) -> ast::Expr_ {
ExprField(expr, ident)
}
pub fn mk_tup_field(&mut self, expr: P<Expr>, idx: codemap::Spanned<usize>) -> ast::Expr_ {
ExprTupField(expr, idx)
}
pub fn mk_assign_op(&mut self, binop: ast::BinOp,
lhs: P<Expr>, rhs: P<Expr>) -> ast::Expr_ {
ExprAssignOp(binop, lhs, rhs)
}
pub fn mk_mac_expr(&mut self, lo: BytePos, hi: BytePos, m: Mac_) -> P<Expr> {
P(Expr {
id: ast::DUMMY_NODE_ID,
node: ExprMac(codemap::Spanned {node: m, span: mk_sp(lo, hi)}),
span: mk_sp(lo, hi),
})
}
pub fn mk_lit_u32(&mut self, i: u32) -> P<Expr> {
let span = &self.span;
let lv_lit = P(codemap::Spanned {
node: LitInt(i as u64, ast::UnsignedIntLit(TyU32)),
span: *span
});
P(Expr {
id: ast::DUMMY_NODE_ID,
node: ExprLit(lv_lit),
span: *span,
})
}
fn expect_open_delim(&mut self) -> PResult<token::DelimToken> {
self.expected_tokens.push(TokenType::Token(token::Gt));
match self.token {
token::OpenDelim(delim) => {
try!(self.bump());
Ok(delim)
},
_ => Err(self.fatal("expected open delimiter")),
}
}
/// At the bottom (top?) of the precedence hierarchy,
/// parse things like parenthesized exprs,
/// macros, return, etc.
pub fn parse_bottom_expr(&mut self) -> PResult<P<Expr>> {
maybe_whole_expr!(self);
let lo = self.span.lo;
let mut hi = self.span.hi;
let ex: Expr_;
// Note: when adding new syntax here, don't forget to adjust Token::can_begin_expr().
match self.token {
token::OpenDelim(token::Paren) => {
try!(self.bump());
// (e) is parenthesized e
// (e,) is a tuple with only one field, e
let mut es = vec![];
let mut trailing_comma = false;
while self.token != token::CloseDelim(token::Paren) {
es.push(try!(self.parse_expr_nopanic()));
try!(self.commit_expr(&**es.last().unwrap(), &[],
&[token::Comma, token::CloseDelim(token::Paren)]));
if self.check(&token::Comma) {
trailing_comma = true;
try!(self.bump());
} else {
trailing_comma = false;
break;
}
}
try!(self.bump());
hi = self.last_span.hi;
return if es.len() == 1 && !trailing_comma {
                    Ok(self.mk_expr(lo, hi, ExprParen(es.into_iter().next().unwrap())))
} else {
Ok(self.mk_expr(lo, hi, ExprTup(es)))
}
},
token::OpenDelim(token::Brace) => {
return self.parse_block_expr(lo, DefaultBlock);
},
token::BinOp(token::Or) | token::OrOr => {
return self.parse_lambda_expr(CaptureByRef);
},
token::Ident(id @ ast::Ident {
name: token::SELF_KEYWORD_NAME,
ctxt: _
}, token::Plain) => {
try!(self.bump());
let path = ast_util::ident_to_path(mk_sp(lo, hi), id);
ex = ExprPath(None, path);
hi = self.last_span.hi;
}
token::OpenDelim(token::Bracket) => {
try!(self.bump());
if self.check(&token::CloseDelim(token::Bracket)) {
// Empty vector.
try!(self.bump());
ex = ExprVec(Vec::new());
} else {
// Nonempty vector.
let first_expr = try!(self.parse_expr_nopanic());
if self.check(&token::Semi) {
// Repeating vector syntax: [ 0; 512 ]
try!(self.bump());
let count = try!(self.parse_expr_nopanic());
try!(self.expect(&token::CloseDelim(token::Bracket)));
ex = ExprRepeat(first_expr, count);
} else if self.check(&token::Comma) {
// Vector with two or more elements.
try!(self.bump());
let remaining_exprs = try!(self.parse_seq_to_end(
&token::CloseDelim(token::Bracket),
seq_sep_trailing_allowed(token::Comma),
|p| Ok(try!(p.parse_expr_nopanic()))
));
let mut exprs = vec!(first_expr);
exprs.extend(remaining_exprs.into_iter());
ex = ExprVec(exprs);
} else {
// Vector with one element.
try!(self.expect(&token::CloseDelim(token::Bracket)));
ex = ExprVec(vec!(first_expr));
}
}
hi = self.last_span.hi;
}
_ => {
                if try!(self.eat_lt()) {
let (qself, path) =
try!(self.parse_qualified_path(QPathParsingMode::MaybeParameters));
return Ok(self.mk_expr(lo, hi, ExprPath(Some(qself), path)));
}
                if try!(self.eat_keyword(keywords::Move)) {
return self.parse_lambda_expr(CaptureByValue);
}
if try!(self.eat_keyword(keywords::If)) {
return self.parse_if_expr();
}
                if try!(self.eat_keyword(keywords::For)) {
                    return self.parse_for_expr(None);
                }
                if try!(self.eat_keyword(keywords::While)) {
return self.parse_while_expr(None);
}
if self.token.is_lifetime() {
let lifetime = self.get_lifetime();
try!(self.bump());
try!(self.expect(&token::Colon));
                    if try!(self.eat_keyword(keywords::While)) {
                        return self.parse_while_expr(Some(lifetime))
                    }
                    if try!(self.eat_keyword(keywords::For)) {
                        return self.parse_for_expr(Some(lifetime))
                    }
                    if try!(self.eat_keyword(keywords::Loop)) {
return self.parse_loop_expr(Some(lifetime))
}
return Err(self.fatal("expected `while`, `for`, or `loop` after a label"))
}
                if try!(self.eat_keyword(keywords::Loop)) {
                    return self.parse_loop_expr(None);
                }
                if try!(self.eat_keyword(keywords::Continue)) {
let lo = self.span.lo;
let ex = if self.token.is_lifetime() {
let lifetime = self.get_lifetime();
try!(self.bump());
ExprAgain(Some(lifetime))
} else {
ExprAgain(None)
};
let hi = self.span.hi;
return Ok(self.mk_expr(lo, hi, ex));
}
                if try!(self.eat_keyword(keywords::Match)) {
                    return self.parse_match_expr();
                }
                if try!(self.eat_keyword(keywords::Unsafe)) {
return self.parse_block_expr(
lo,
UnsafeBlock(ast::UserProvided));
}
                if try!(self.eat_keyword(keywords::Return)) {
// RETURN expression
if self.token.can_begin_expr() {
let e = try!(self.parse_expr_nopanic());
hi = e.span.hi;
ex = ExprRet(Some(e));
} else {
ex = ExprRet(None);
}
                } else if try!(self.eat_keyword(keywords::Break)) {
// BREAK expression
if self.token.is_lifetime() {
let lifetime = self.get_lifetime();
try!(self.bump());
ex = ExprBreak(Some(lifetime));
} else {
ex = ExprBreak(None);
}
hi = self.span.hi;
} else if self.check(&token::ModSep) ||
self.token.is_ident() &&
!self.check_keyword(keywords::True) &&
!self.check_keyword(keywords::False) {
let pth =
try!(self.parse_path(LifetimeAndTypesWithColons));
                    // `!` as an operator is prefix-only, so a `!` after a path
                    // can only begin a macro invocation.
if self.check(&token::Not) {
// MACRO INVOCATION expression
try!(self.bump());
let delim = try!(self.expect_open_delim());
let tts = try!(self.parse_seq_to_end(
&token::CloseDelim(delim),
seq_sep_none(),
|p| p.parse_token_tree()));
let hi = self.last_span.hi;
return Ok(self.mk_mac_expr(lo,
hi,
MacInvocTT(pth,
tts,
EMPTY_CTXT)));
}
if self.check(&token::OpenDelim(token::Brace)) {
// This is a struct literal, unless we're prohibited
// from parsing struct literals here.
if !self.restrictions.contains(RESTRICTION_NO_STRUCT_LITERAL) {
// It's a struct literal.
try!(self.bump());
let mut fields = Vec::new();
let mut base = None;
while self.token != token::CloseDelim(token::Brace) {
                            if try!(self.eat(&token::DotDot)) {
base = Some(try!(self.parse_expr_nopanic()));
break;
}
fields.push(try!(self.parse_field()));
try!(self.commit_expr(&*fields.last().unwrap().expr,
&[token::Comma],
&[token::CloseDelim(token::Brace)]));
}
if fields.is_empty() && base.is_none() {
let last_span = self.last_span;
self.span_err(last_span,
"structure literal must either \
have at least one field or use \
functional structure update \
syntax");
}
hi = self.span.hi;
try!(self.expect(&token::CloseDelim(token::Brace)));
ex = ExprStruct(pth, fields, base);
return Ok(self.mk_expr(lo, hi, ex));
}
}
hi = pth.span.hi;
ex = ExprPath(None, pth);
} else {
// other literal expression
let lit = try!(self.parse_lit());
hi = lit.span.hi;
ex = ExprLit(P(lit));
}
}
}
return Ok(self.mk_expr(lo, hi, ex));
}
/// Parse a block or unsafe block
pub fn parse_block_expr(&mut self, lo: BytePos, blk_mode: BlockCheckMode)
-> PResult<P<Expr>> {
try!(self.expect(&token::OpenDelim(token::Brace)));
let blk = try!(self.parse_block_tail(lo, blk_mode));
return Ok(self.mk_expr(blk.span.lo, blk.span.hi, ExprBlock(blk)));
}
    /// Parse `a.b`, `a(13)`, `a[4]`, or just `a`
pub fn parse_dot_or_call_expr(&mut self) -> PResult<P<Expr>> {
let b = try!(self.parse_bottom_expr());
self.parse_dot_or_call_expr_with(b)
}
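    /// Given an already-parsed expression `e0`, greedily parse postfix
    /// suffixes: field access, tuple indexing, method calls, call
    /// parentheses, and indexing brackets.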
pub fn parse_dot_or_call_expr_with(&mut self, e0: P<Expr>) -> PResult<P<Expr>> {
let mut e = e0;
let lo = e.span.lo;
let mut hi;
loop {
// expr.f
            if try!(self.eat(&token::Dot)) {
match self.token {
token::Ident(i, _) => {
let dot = self.last_span.hi;
hi = self.span.hi;
try!(self.bump());
                        let (_, tys, bindings) = if try!(self.eat(&token::ModSep)) {
try!(self.expect_lt());
try!(self.parse_generic_values_after_lt())
} else {
(Vec::new(), Vec::new(), Vec::new())
};
if !bindings.is_empty() {
let last_span = self.last_span;
self.span_err(last_span, "type bindings are only permitted on trait paths");
}
// expr.f() method call
match self.token {
token::OpenDelim(token::Paren) => {
let mut es = try!(self.parse_unspanned_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
seq_sep_trailing_allowed(token::Comma),
|p| Ok(try!(p.parse_expr_nopanic()))
));
hi = self.last_span.hi;
es.insert(0, e);
let id = spanned(dot, hi, i);
let nd = self.mk_method_call(id, tys, es);
e = self.mk_expr(lo, hi, nd);
}
_ => {
if !tys.is_empty() {
let last_span = self.last_span;
self.span_err(last_span,
"field expressions may not \
have type parameters");
}
let id = spanned(dot, hi, i);
let field = self.mk_field(e, id);
e = self.mk_expr(lo, hi, field);
}
}
}
token::Literal(token::Integer(n), suf) => {
let sp = self.span;
// A tuple index may not have a suffix
self.expect_no_suffix(sp, "tuple index", suf);
let dot = self.last_span.hi;
hi = self.span.hi;
try!(self.bump());
let index = n.as_str().parse::<usize>().ok();
match index {
Some(n) => {
let id = spanned(dot, hi, n);
let field = self.mk_tup_field(e, id);
e = self.mk_expr(lo, hi, field);
}
None => {
let last_span = self.last_span;
self.span_err(last_span, "invalid tuple or tuple struct index");
}
}
}
token::Literal(token::Float(n), _suf) => {
try!(self.bump());
let last_span = self.last_span;
let fstr = n.as_str();
self.span_err(last_span,
                                      &format!("unexpected token: `{}`", fstr));
if fstr.chars().all(|x| "0123456789.".contains(x)) {
let float = match fstr.parse::<f64>().ok() {
Some(f) => f,
None => continue,
};
self.fileline_help(last_span,
&format!("try parenthesizing the first index; e.g., `(foo.{}){}`",
float.trunc() as usize,
&float.fract().to_string()[1..]));
}
self.abort_if_errors();
}
_ => return Err(self.unexpected())
}
continue;
}
if self.expr_is_complete(&*e) { break; }
match self.token {
// expr(...)
token::OpenDelim(token::Paren) => {
let es = try!(self.parse_unspanned_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
seq_sep_trailing_allowed(token::Comma),
|p| Ok(try!(p.parse_expr_nopanic()))
));
hi = self.last_span.hi;
let nd = self.mk_call(e, es);
e = self.mk_expr(lo, hi, nd);
}
// expr[...]
// Could be either an index expression or a slicing expression.
token::OpenDelim(token::Bracket) => {
try!(self.bump());
let ix = try!(self.parse_expr_nopanic());
hi = self.span.hi;
try!(self.commit_expr_expecting(&*ix, token::CloseDelim(token::Bracket)));
let index = self.mk_index(e, ix);
e = self.mk_expr(lo, hi, index)
}
_ => return Ok(e)
}
}
return Ok(e);
}
// Parse unquoted tokens after a `$` in a token tree
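    // (e.g. `$name`, `$name:kind`, `$crate`, or a repetition `$( tts ) sep op`)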
fn parse_unquoted(&mut self) -> PResult<TokenTree> {
let mut sp = self.span;
let (name, namep) = match self.token {
token::Dollar => {
try!(self.bump());
if self.token == token::OpenDelim(token::Paren) {
let Spanned { node: seq, span: seq_span } = try!(self.parse_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
seq_sep_none(),
|p| p.parse_token_tree()
));
let (sep, repeat) = try!(self.parse_sep_and_kleene_op());
let name_num = macro_parser::count_names(&seq);
return Ok(TtSequence(mk_sp(sp.lo, seq_span.hi),
Rc::new(SequenceRepetition {
tts: seq,
separator: sep,
op: repeat,
num_captures: name_num
})));
} else if self.token.is_keyword_allow_following_colon(keywords::Crate) {
try!(self.bump());
return Ok(TtToken(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar)));
} else {
sp = mk_sp(sp.lo, self.span.hi);
let namep = match self.token { token::Ident(_, p) => p, _ => token::Plain };
let name = try!(self.parse_ident());
(name, namep)
}
}
token::SubstNt(name, namep) => {
try!(self.bump());
(name, namep)
}
_ => unreachable!()
};
// continue by trying to parse the `:ident` after `$name`
if self.token == token::Colon && self.look_ahead(1, |t| t.is_ident() &&
!t.is_strict_keyword() &&
!t.is_reserved_keyword()) {
try!(self.bump());
sp = mk_sp(sp.lo, self.span.hi);
let kindp = match self.token { token::Ident(_, p) => p, _ => token::Plain };
let nt_kind = try!(self.parse_ident());
Ok(TtToken(sp, MatchNt(name, nt_kind, namep, kindp)))
} else {
Ok(TtToken(sp, SubstNt(name, namep)))
}
}
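    /// Outside of any quoted context, a leftover `$name` substitution is
    /// meaningless, so report it as an unknown macro variable.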
pub fn check_unknown_macro_variable(&mut self) -> PResult<()> {
if self.quote_depth == 0 {
match self.token {
token::SubstNt(name, _) =>
return Err(self.fatal(&format!("unknown macro variable `{}`",
token::get_ident(name)))),
_ => {}
}
}
Ok(())
}
/// Parse an optional separator followed by a Kleene-style
/// repetition token (+ or *).
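    /// For example, in `$(x),*` this parses the trailing `,*`, yielding the
    /// optional `,` separator and the `*` (zero-or-more) operator.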
pub fn parse_sep_and_kleene_op(&mut self) -> PResult<(Option<token::Token>, ast::KleeneOp)> {
fn parse_kleene_op(parser: &mut Parser) -> PResult<Option<ast::KleeneOp>> {
match parser.token {
token::BinOp(token::Star) => {
try!(parser.bump());
Ok(Some(ast::ZeroOrMore))
},
token::BinOp(token::Plus) => {
try!(parser.bump());
Ok(Some(ast::OneOrMore))
},
_ => Ok(None)
}
        }
match try!(parse_kleene_op(self)) {
Some(kleene_op) => return Ok((None, kleene_op)),
None => {}
}
let separator = try!(self.bump_and_get());
match try!(parse_kleene_op(self)) {
            Some(kleene_op) => Ok((Some(separator), kleene_op)),
None => return Err(self.fatal("expected `*` or `+`"))
}
}
    /// Parse a single token tree from the input.
pub fn parse_token_tree(&mut self) -> PResult<TokenTree> {
        // FIXME #6994: currently, this is too eager. It
        // parses token trees but also identifies TtSequence's
        // and token::SubstNt's; it's too early to know
        // whether something will be a nonterminal or a seq.
maybe_whole!(deref self, NtTT);
        // This is the fall-through for the 'match' below.
        // Invariants: the current token is not a left-delimiter,
        // not an EOF, and not the desired right-delimiter (if
        // it were, parse_seq_to_before_end would have prevented
        // reaching this point).
fn parse_non_delim_tt_tok(p: &mut Parser) -> PResult<TokenTree> {
maybe_whole!(deref p, NtTT);
match p.token {
token::CloseDelim(_) => {
// This is a conservative error: only report the last unclosed delimiter. The
// previous unclosed delimiters could actually be closed! The parser just hasn't
// gotten to them yet.
match p.open_braces.last() {
None => {}
Some(&sp) => p.span_note(sp, "unclosed delimiter"),
};
let token_str = p.this_token_to_string();
Err(p.fatal(&format!("incorrect close delimiter: `{}`",
token_str)))
},
/* we ought to allow different depths of unquotation */
token::Dollar | token::SubstNt(..) if p.quote_depth > 0 => {
p.parse_unquoted()
}
_ => {
Ok(TtToken(p.span, try!(p.bump_and_get())))
}
}
}
match self.token {
token::Eof => {
let open_braces = self.open_braces.clone();
for sp in &open_braces {
self.span_help(*sp, "did you mean to close this delimiter?");
}
// There shouldn't really be a span, but it's easier for the test runner
// if we give it one
                return Err(self.fatal("this file contains an unclosed delimiter"));
},
token::OpenDelim(delim) => {
// The span for beginning of the delimited section
let pre_span = self.span;
// Parse the open delimiter.
self.open_braces.push(self.span);
let open_span = self.span;
try!(self.bump());
// Parse the token trees within the delimiters
let tts = try!(self.parse_seq_to_before_end(
&token::CloseDelim(delim),
seq_sep_none(),
|p| p.parse_token_tree()
));
// Parse the close delimiter.
let close_span = self.span;
try!(self.bump());
self.open_braces.pop().unwrap();
// Expand to cover the entire delimited token tree
let span = Span { hi: close_span.hi, ..pre_span };
Ok(TtDelimited(span, Rc::new(Delimited {
delim: delim,
open_span: open_span,
tts: tts,
close_span: close_span,
})))
},
_ => parse_non_delim_tt_tok(self),
}
}
    // Parse a stream of tokens into a list of TokenTrees,
    // up to EOF.
pub fn parse_all_token_trees(&mut self) -> PResult<Vec<TokenTree>> {
let mut tts = Vec::new();
while self.token != token::Eof {
tts.push(try!(self.parse_token_tree()));
}
Ok(tts)
}
/// Parse a prefix-operator expr
pub fn parse_prefix_expr(&mut self) -> PResult<P<Expr>> {
let lo = self.span.lo;
let hi;
// Note: when adding new unary operators, don't forget to adjust Token::can_begin_expr()
let ex;
match self.token {
token::Not => {
try!(self.bump());
let e = try!(self.parse_prefix_expr());
hi = e.span.hi;
ex = self.mk_unary(UnNot, e);
}
token::BinOp(token::Minus) => {
try!(self.bump());
let e = try!(self.parse_prefix_expr());
hi = e.span.hi;
ex = self.mk_unary(UnNeg, e);
}
token::BinOp(token::Star) => {
try!(self.bump());
let e = try!(self.parse_prefix_expr());
hi = e.span.hi;
ex = self.mk_unary(UnDeref, e);
}
token::BinOp(token::And) | token::AndAnd => {
try!(self.expect_and());
let m = try!(self.parse_mutability());
let e = try!(self.parse_prefix_expr());
hi = e.span.hi;
ex = ExprAddrOf(m, e);
}
token::Ident(_, _) => {
if !self.check_keyword(keywords::Box) {
return self.parse_dot_or_call_expr();
}
let lo = self.span.lo;
let box_hi = self.span.hi;
try!(self.bump());
// Check for a place: `box(PLACE) EXPR`.
                if try!(self.eat(&token::OpenDelim(token::Paren))) {
                    // Support `box() EXPR` as the default.
                    if !try!(self.eat(&token::CloseDelim(token::Paren))) {
let place = try!(self.parse_expr_nopanic());
try!(self.expect(&token::CloseDelim(token::Paren)));
// Give a suggestion to use `box()` when a parenthesised expression is used
if !self.token.can_begin_expr() {
let span = self.span;
let this_token_to_string = self.this_token_to_string();
self.span_err(span,
&format!("expected expression, found `{}`",
this_token_to_string));
let box_span = mk_sp(lo, box_hi);
self.span_suggestion(box_span,
"try using `box()` instead:",
"box()".to_string());
self.abort_if_errors();
}
let subexpression = try!(self.parse_prefix_expr());
hi = subexpression.span.hi;
ex = ExprBox(Some(place), subexpression);
return Ok(self.mk_expr(lo, hi, ex));
}
}
// Otherwise, we use the unique pointer default.
let subexpression = try!(self.parse_prefix_expr());
hi = subexpression.span.hi;
// FIXME (pnkfelix): After working out kinks with box
// desugaring, should be `ExprBox(None, subexpression)`
// instead.
ex = self.mk_unary(UnUniq, subexpression);
}
_ => return self.parse_dot_or_call_expr()
}
return Ok(self.mk_expr(lo, hi, ex));
}
/// Parse an expression of binops
pub fn parse_binops(&mut self) -> PResult<P<Expr>> {
let prefix_expr = try!(self.parse_prefix_expr());
self.parse_more_binops(prefix_expr, 0)
}
/// Parse an expression of binops of at least min_prec precedence
pub fn parse_more_binops(&mut self, lhs: P<Expr>, min_prec: usize) -> PResult<P<Expr>> {
if self.expr_is_complete(&*lhs) { return Ok(lhs); }
self.expected_tokens.push(TokenType::Operator);
let cur_op_span = self.span;
let cur_opt = self.token.to_binop();
match cur_opt {
Some(cur_op) => {
if ast_util::is_comparison_binop(cur_op) {
self.check_no_chained_comparison(&*lhs, cur_op)
}
let cur_prec = operator_prec(cur_op);
if cur_prec >= min_prec {
try!(self.bump());
let expr = try!(self.parse_prefix_expr());
let rhs = try!(self.parse_more_binops(expr, cur_prec + 1));
let lhs_span = lhs.span;
let rhs_span = rhs.span;
let binary = self.mk_binary(codemap::respan(cur_op_span, cur_op), lhs, rhs);
let bin = self.mk_expr(lhs_span.lo, rhs_span.hi, binary);
self.parse_more_binops(bin, min_prec)
} else {
Ok(lhs)
}
}
None => {
                if AS_PREC >= min_prec && try!(self.eat_keyword_noexpect(keywords::As)) {
let rhs = try!(self.parse_ty_nopanic());
let _as = self.mk_expr(lhs.span.lo,
rhs.span.hi,
ExprCast(lhs, rhs));
self.parse_more_binops(_as, min_prec)
} else {
Ok(lhs)
}
}
}
}
/// Produce an error if comparison operators are chained (RFC #558).
    /// We only need to check lhs, not rhs, because all comparison ops
    /// have the same precedence and are left-associative.
fn check_no_chained_comparison(&mut self, lhs: &Expr, outer_op: ast::BinOp_) {
debug_assert!(ast_util::is_comparison_binop(outer_op));
match lhs.node {
ExprBinary(op, _, _) if ast_util::is_comparison_binop(op.node) => {
// respan to include both operators
let op_span = mk_sp(op.span.lo, self.span.hi);
self.span_err(op_span,
"chained comparison operators require parentheses");
if op.node == BiLt && outer_op == BiGt {
self.fileline_help(op_span,
"use `::<...>` instead of `<...>` if you meant to specify type arguments");
}
}
_ => {}
}
}
    /// Parse an assignment expression. Despite its name, this is the main
    /// entry point for parsing an arbitrary expression.
pub fn parse_assign_expr(&mut self) -> PResult<P<Expr>> {
match self.token {
token::DotDot => {
// prefix-form of range notation '..expr'
// This has the same precedence as assignment expressions
// (much lower than other prefix expressions) to be consistent
// with the postfix-form 'expr..'
let lo = self.span.lo;
try!(self.bump());
let opt_end = if self.is_at_start_of_range_notation_rhs() {
let end = try!(self.parse_binops());
Some(end)
} else {
None
};
let hi = self.span.hi;
let ex = self.mk_range(None, opt_end);
Ok(self.mk_expr(lo, hi, ex))
}
_ => {
let lhs = try!(self.parse_binops());
self.parse_assign_expr_with(lhs)
}
}
}
pub fn parse_assign_expr_with(&mut self, lhs: P<Expr>) -> PResult<P<Expr>> {
let restrictions = self.restrictions & RESTRICTION_NO_STRUCT_LITERAL;
let op_span = self.span;
match self.token {
token::Eq => {
try!(self.bump());
let rhs = try!(self.parse_expr_res(restrictions));
Ok(self.mk_expr(lhs.span.lo, rhs.span.hi, ExprAssign(lhs, rhs)))
}
token::BinOpEq(op) => {
try!(self.bump());
let rhs = try!(self.parse_expr_res(restrictions));
let aop = match op {
token::Plus => BiAdd,
token::Minus => BiSub,
token::Star => BiMul,
token::Slash => BiDiv,
token::Percent => BiRem,
token::Caret => BiBitXor,
token::And => BiBitAnd,
token::Or => BiBitOr,
token::Shl => BiShl,
token::Shr => BiShr
};
let rhs_span = rhs.span;
let span = lhs.span;
let assign_op = self.mk_assign_op(codemap::respan(op_span, aop), lhs, rhs);
Ok(self.mk_expr(span.lo, rhs_span.hi, assign_op))
}
// A range expression, either `expr..expr` or `expr..`.
token::DotDot => {
try!(self.bump());
let opt_end = if self.is_at_start_of_range_notation_rhs() {
let end = try!(self.parse_binops());
Some(end)
} else {
None
};
let lo = lhs.span.lo;
let hi = self.span.hi;
let range = self.mk_range(Some(lhs), opt_end);
return Ok(self.mk_expr(lo, hi, range));
}
_ => {
Ok(lhs)
}
}
}
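    /// Decide whether the current token may begin the right-hand side of a
    /// range expression such as `a..b`.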
fn is_at_start_of_range_notation_rhs(&self) -> bool {
if self.token.can_begin_expr() {
// parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`.
if self.token == token::OpenDelim(token::Brace) {
return !self.restrictions.contains(RESTRICTION_NO_STRUCT_LITERAL);
}
true
} else {
false
}
}
/// Parse an 'if' or 'if let' expression ('if' token already eaten)
pub fn parse_if_expr(&mut self) -> PResult<P<Expr>> {
if self.check_keyword(keywords::Let) {
return self.parse_if_let_expr();
}
let lo = self.last_span.lo;
let cond = try!(self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL));
let thn = try!(self.parse_block());
let mut els: Option<P<Expr>> = None;
let mut hi = thn.span.hi;
        if try!(self.eat_keyword(keywords::Else)) {
let elexpr = try!(self.parse_else_expr());
hi = elexpr.span.hi;
els = Some(elexpr);
}
Ok(self.mk_expr(lo, hi, ExprIf(cond, thn, els)))
}
/// Parse an 'if let' expression ('if' token already eaten)
pub fn parse_if_let_expr(&mut self) -> PResult<P<Expr>> {
let lo = self.last_span.lo;
try!(self.expect_keyword(keywords::Let));
let pat = try!(self.parse_pat_nopanic());
try!(self.expect(&token::Eq));
let expr = try!(self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL));
let thn = try!(self.parse_block());
        let (hi, els) = if try!(self.eat_keyword(keywords::Else)) {
let expr = try!(self.parse_else_expr());
(expr.span.hi, Some(expr))
} else {
(thn.span.hi, None)
};
Ok(self.mk_expr(lo, hi, ExprIfLet(pat, expr, thn, els)))
}
// `|args| expr`
pub fn parse_lambda_expr(&mut self, capture_clause: CaptureClause)
-> PResult<P<Expr>>
{
let lo = self.span.lo;
let decl = try!(self.parse_fn_block_decl());
let body = match decl.output {
DefaultReturn(_) => {
// If no explicit return type is given, parse any
// expr and wrap it up in a dummy block:
let body_expr = try!(self.parse_expr_nopanic());
P(ast::Block {
id: ast::DUMMY_NODE_ID,
stmts: vec![],
span: body_expr.span,
expr: Some(body_expr),
rules: DefaultBlock,
})
}
_ => {
// If an explicit return type is given, require a
// block to appear (RFC 968).
try!(self.parse_block())
}
};
Ok(self.mk_expr(
lo,
body.span.hi,
ExprClosure(capture_clause, decl, body)))
}
pub fn parse_else_expr(&mut self) -> PResult<P<Expr>> {
        if try!(self.eat_keyword(keywords::If)) {
return self.parse_if_expr();
} else {
let blk = try!(self.parse_block());
return Ok(self.mk_expr(blk.span.lo, blk.span.hi, ExprBlock(blk)));
}
}
/// Parse a 'for' .. 'in' expression ('for' token already eaten)
pub fn parse_for_expr(&mut self, opt_ident: Option<ast::Ident>) -> PResult<P<Expr>> {
// Parse: `for <src_pat> in <src_expr> <src_loop_block>`
let lo = self.last_span.lo;
let pat = try!(self.parse_pat_nopanic());
try!(self.expect_keyword(keywords::In));
let expr = try!(self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL));
let loop_block = try!(self.parse_block());
let hi = self.last_span.hi;
Ok(self.mk_expr(lo, hi, ExprForLoop(pat, expr, loop_block, opt_ident)))
}
/// Parse a 'while' or 'while let' expression ('while' token already eaten)
pub fn parse_while_expr(&mut self, opt_ident: Option<ast::Ident>) -> PResult<P<Expr>> {
if self.token.is_keyword(keywords::Let) {
return self.parse_while_let_expr(opt_ident);
}
let lo = self.last_span.lo;
let cond = try!(self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL));
let body = try!(self.parse_block());
let hi = body.span.hi;
return Ok(self.mk_expr(lo, hi, ExprWhile(cond, body, opt_ident)));
}
/// Parse a 'while let' expression ('while' token already eaten)
pub fn parse_while_let_expr(&mut self, opt_ident: Option<ast::Ident>) -> PResult<P<Expr>> {
let lo = self.last_span.lo;
try!(self.expect_keyword(keywords::Let));
let pat = try!(self.parse_pat_nopanic());
try!(self.expect(&token::Eq));
let expr = try!(self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL));
let body = try!(self.parse_block());
let hi = body.span.hi;
return Ok(self.mk_expr(lo, hi, ExprWhileLet(pat, expr, body, opt_ident)));
}
pub fn parse_loop_expr(&mut self, opt_ident: Option<ast::Ident>) -> PResult<P<Expr>> {
let lo = self.last_span.lo;
let body = try!(self.parse_block());
let hi = body.span.hi;
Ok(self.mk_expr(lo, hi, ExprLoop(body, opt_ident)))
}
fn parse_match_expr(&mut self) -> PResult<P<Expr>> {
let lo = self.last_span.lo;
let discriminant = try!(self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL));
try!(self.commit_expr_expecting(&*discriminant, token::OpenDelim(token::Brace)));
let mut arms: Vec<Arm> = Vec::new();
while self.token != token::CloseDelim(token::Brace) {
arms.push(try!(self.parse_arm_nopanic()));
}
let hi = self.span.hi;
try!(self.bump());
return Ok(self.mk_expr(lo, hi, ExprMatch(discriminant, arms, MatchSource::Normal)));
}
pub fn parse_arm_nopanic(&mut self) -> PResult<Arm> {
maybe_whole!(no_clone self, NtArm);
let attrs = self.parse_outer_attributes();
let pats = try!(self.parse_pats());
let mut guard = None;
        if try!(self.eat_keyword(keywords::If)) {
guard = Some(try!(self.parse_expr_nopanic()));
}
try!(self.expect(&token::FatArrow));
let expr = try!(self.parse_expr_res(RESTRICTION_STMT_EXPR));
let require_comma =
!classify::expr_is_simple_block(&*expr)
&& self.token != token::CloseDelim(token::Brace);
if require_comma {
try!(self.commit_expr(&*expr, &[token::Comma], &[token::CloseDelim(token::Brace)]));
} else {
try!(self.eat(&token::Comma));
}
Ok(ast::Arm {
attrs: attrs,
pats: pats,
guard: guard,
body: expr,
})
}
/// Parse an expression
pub fn parse_expr_nopanic(&mut self) -> PResult<P<Expr>> {
return self.parse_expr_res(UNRESTRICTED);
}
/// Parse an expression, subject to the given restrictions
pub fn parse_expr_res(&mut self, r: Restrictions) -> PResult<P<Expr>> {
let old = self.restrictions;
self.restrictions = r;
let e = try!(self.parse_assign_expr());
self.restrictions = old;
return Ok(e);
}
/// Parse the RHS of a local variable declaration (e.g. '= 14;')
fn parse_initializer(&mut self) -> PResult<Option<P<Expr>>> {
if self.check(&token::Eq) {
try!(self.bump());
Ok(Some(try!(self.parse_expr_nopanic())))
} else {
Ok(None)
}
}
    /// Parse one or more patterns separated by `|`
fn parse_pats(&mut self) -> PResult<Vec<P<Pat>>> {
let mut pats = Vec::new();
loop {
pats.push(try!(self.parse_pat_nopanic()));
            if self.check(&token::BinOp(token::Or)) {
                try!(self.bump());
            } else {
                return Ok(pats);
            }
        }
}
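    /// Parse the comma-separated patterns inside a tuple pattern's
    /// parentheses; a lone element must be followed by a trailing comma
    /// to distinguish it from a parenthesized pattern.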
fn parse_pat_tuple_elements(&mut self) -> PResult<Vec<P<Pat>>> {
let mut fields = vec![];
if !self.check(&token::CloseDelim(token::Paren)) {
fields.push(try!(self.parse_pat_nopanic()));
if self.look_ahead(1, |t| *t != token::CloseDelim(token::Paren)) {
while try!(self.eat(&token::Comma)) &&
!self.check(&token::CloseDelim(token::Paren)) {
fields.push(try!(self.parse_pat_nopanic()));
}
}
if fields.len() == 1 {
try!(self.expect(&token::Comma));
}
}
Ok(fields)
}
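    /// Parse the elements of a vector pattern such as `[a, b, .., y, z]`,
    /// returning the patterns before the slice, the optional slice pattern
    /// itself, and the patterns after it.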
fn parse_pat_vec_elements(
&mut self,
) -> PResult<(Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>)> {
let mut before = Vec::new();
let mut slice = None;
let mut after = Vec::new();
let mut first = true;
let mut before_slice = true;
while self.token != token::CloseDelim(token::Bracket) {
if first {
first = false;
} else {
try!(self.expect(&token::Comma));
if self.token == token::CloseDelim(token::Bracket)
&& (before_slice || !after.is_empty()) {
break
}
}
if before_slice {
if self.check(&token::DotDot) {
try!(self.bump());
if self.check(&token::Comma) ||
self.check(&token::CloseDelim(token::Bracket)) {
slice = Some(P(ast::Pat {
id: ast::DUMMY_NODE_ID,
node: PatWild(PatWildMulti),
span: self.span,
}));
before_slice = false;
}
continue
}
}
let subpat = try!(self.parse_pat_nopanic());
if before_slice && self.check(&token::DotDot) {
try!(self.bump());
slice = Some(subpat);
before_slice = false;
} else if before_slice {
before.push(subpat);
} else {
after.push(subpat);
}
}
Ok((before, slice, after))
}
/// Parse the fields of a struct-like pattern
fn parse_pat_fields(&mut self) -> PResult<(Vec<codemap::Spanned<ast::FieldPat>> , bool)> {
let mut fields = Vec::new();
let mut etc = false;
let mut first = true;
while self.token != token::CloseDelim(token::Brace) {
if first {
first = false;
} else {
try!(self.expect(&token::Comma));
// accept trailing commas
if self.check(&token::CloseDelim(token::Brace)) { break }
}
let lo = self.span.lo;
let hi;
if self.check(&token::DotDot) {
try!(self.bump());
if self.token != token::CloseDelim(token::Brace) {
let token_str = self.this_token_to_string();
return Err(self.fatal(&format!("expected `{}`, found `{}`", "}",
token_str)))
}
etc = true;
break;
}
            // If a colon is one token ahead, we're parsing an explicit
            // `fieldname: pat` field.
let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) {
// Parsing a pattern of the form "fieldname: pat"
let fieldname = try!(self.parse_ident());
try!(self.bump());
let pat = try!(self.parse_pat_nopanic());
hi = pat.span.hi;
(pat, fieldname, false)
} else {
// Parsing a pattern of the form "(box) (ref) (mut) fieldname"
let is_box = try!(self.eat_keyword(keywords::Box));
let boxed_span_lo = self.span.lo;
let is_ref = try!(self.eat_keyword(keywords::Ref));
let is_mut = try!(self.eat_keyword(keywords::Mut));
let fieldname = try!(self.parse_ident());
hi = self.last_span.hi;
let bind_type = match (is_ref, is_mut) {
(true, true) => BindByRef(MutMutable),
(true, false) => BindByRef(MutImmutable),
(false, true) => BindByValue(MutMutable),
(false, false) => BindByValue(MutImmutable),
};
let fieldpath = codemap::Spanned{span:self.last_span, node:fieldname};
let fieldpat = P(ast::Pat{
id: ast::DUMMY_NODE_ID,
node: PatIdent(bind_type, fieldpath, None),
span: mk_sp(boxed_span_lo, hi),
});
let subpat = if is_box {
P(ast::Pat{
id: ast::DUMMY_NODE_ID,
node: PatBox(fieldpat),
span: mk_sp(lo, hi),
})
} else {
fieldpat
};
(subpat, fieldname, true)
};
fields.push(codemap::Spanned { span: mk_sp(lo, hi),
node: ast::FieldPat { ident: fieldname,
pat: subpat,
is_shorthand: is_shorthand }});
}
return Ok((fields, etc));
}
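    /// Parse the end of a range pattern: either a (possibly qualified) path
    /// or a literal with an optional leading minus.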
fn parse_pat_range_end(&mut self) -> PResult<P<Expr>> {
if self.is_path_start() {
let lo = self.span.lo;
let (qself, path) = if try!(self.eat_lt()) {
// Parse a qualified path
let (qself, path) =
try!(self.parse_qualified_path(QPathParsingMode::NoParameters));
(Some(qself), path)
} else {
// Parse an unqualified path
(None, try!(self.parse_path(LifetimeAndTypesWithColons)))
};
let hi = self.last_span.hi;
Ok(self.mk_expr(lo, hi, ExprPath(qself, path)))
} else {
self.parse_literal_maybe_minus()
}
}
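    /// Check whether the current token can begin a path, excluding the
    /// boolean literals `true` and `false`.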
fn is_path_start(&self) -> bool {
(self.token == token::Lt || self.token == token::ModSep
|| self.token.is_ident() || self.token.is_path())
&& !self.token.is_keyword(keywords::True) && !self.token.is_keyword(keywords::False)
}
/// Parse a pattern.
pub fn parse_pat_nopanic(&mut self) -> PResult<P<Pat>> {
maybe_whole!(self, NtPat);
let lo = self.span.lo;
let pat;
match self.token {
token::Underscore => {
// Parse _
try!(self.bump());
pat = PatWild(PatWildSingle);
}
token::BinOp(token::And) | token::AndAnd => {
// Parse &pat / &mut pat
try!(self.expect_and());
let mutbl = try!(self.parse_mutability());
let subpat = try!(self.parse_pat_nopanic());
pat = PatRegion(subpat, mutbl);
}
token::OpenDelim(token::Paren) => {
// Parse (pat,pat,pat,...) as tuple pattern
try!(self.bump());
let fields = try!(self.parse_pat_tuple_elements());
try!(self.expect(&token::CloseDelim(token::Paren)));
pat = PatTup(fields);
}
token::OpenDelim(token::Bracket) => {
// Parse [pat,pat,...] as vector pattern
try!(self.bump());
let (before, slice, after) = try!(self.parse_pat_vec_elements());
try!(self.expect(&token::CloseDelim(token::Bracket)));
pat = PatVec(before, slice, after);
}
_ => {
// At this point, token != _, &, &&, (, [
if try!(self.eat_keyword(keywords::Mut)) {
// Parse mut ident @ pat
pat = try!(self.parse_pat_ident(BindByValue(MutMutable)));
} else if try!(self.eat_keyword(keywords::Ref)) {
// Parse ref ident @ pat / ref mut ident @ pat
let mutbl = try!(self.parse_mutability());
pat = try!(self.parse_pat_ident(BindByRef(mutbl)));
} else if try!(self.eat_keyword(keywords::Box)) {
// Parse box pat
let subpat = try!(self.parse_pat_nopanic());
pat = PatBox(subpat);
} else if self.is_path_start() {
// Parse pattern starting with a path
if self.token.is_plain_ident() && self.look_ahead(1, |t| *t != token::DotDotDot &&
*t != token::OpenDelim(token::Brace) &&
*t != token::OpenDelim(token::Paren) &&
// Contrary to its definition, a plain ident can be followed by :: in macros
*t != token::ModSep) {
// Plain idents have some extra abilities here compared to general paths
if self.look_ahead(1, |t| *t == token::Not) {
// Parse macro invocation
let ident = try!(self.parse_ident());
let ident_span = self.last_span;
let path = ident_to_path(ident_span, ident);
try!(self.bump());
let delim = try!(self.expect_open_delim());
let tts = try!(self.parse_seq_to_end(&token::CloseDelim(delim),
seq_sep_none(), |p| p.parse_token_tree()));
let mac = MacInvocTT(path, tts, EMPTY_CTXT);
pat = PatMac(codemap::Spanned {node: mac, span: self.span});
} else {
// Parse ident @ pat
// This can give false positives and parse nullary enums,
// they are dealt with later in resolve
pat = try!(self.parse_pat_ident(BindByValue(MutImmutable)));
}
} else {
let (qself, path) = if try!(self.eat_lt()) {
// Parse a qualified path
let (qself, path) =
try!(self.parse_qualified_path(QPathParsingMode::NoParameters));
(Some(qself), path)
} else {
// Parse an unqualified path
(None, try!(self.parse_path(LifetimeAndTypesWithColons)))
};
match self.token {
token::DotDotDot => {
// Parse range
let hi = self.last_span.hi;
let begin = self.mk_expr(lo, hi, ExprPath(qself, path));
try!(self.bump());
let end = try!(self.parse_pat_range_end());
pat = PatRange(begin, end);
}
token::OpenDelim(token::Brace) => {
if qself.is_some() {
let span = self.span;
self.span_err(span,
"unexpected `{` after qualified path");
self.abort_if_errors();
}
// Parse struct pattern
try!(self.bump());
let (fields, etc) = try!(self.parse_pat_fields());
try!(self.bump());
pat = PatStruct(path, fields, etc);
}
token::OpenDelim(token::Paren) => {
if qself.is_some() {
let span = self.span;
self.span_err(span,
"unexpected `(` after qualified path");
self.abort_if_errors();
}
// Parse tuple struct or enum pattern
if self.look_ahead(1, |t| *t == token::DotDot) {
// This is a "top constructor only" pat
try!(self.bump());
try!(self.bump());
try!(self.expect(&token::CloseDelim(token::Paren)));
pat = PatEnum(path, None);
} else {
let args = try!(self.parse_enum_variant_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
seq_sep_trailing_allowed(token::Comma),
|p| p.parse_pat_nopanic()));
pat = PatEnum(path, Some(args));
}
}
_ if qself.is_some() => {
// Parse qualified path
pat = PatQPath(qself.unwrap(), path);
}
_ => {
// Parse nullary enum
pat = PatEnum(path, Some(vec![]));
}
}
}
} else {
// Try to parse everything else as literal with optional minus
let begin = try!(self.parse_literal_maybe_minus());
if try!(self.eat(&token::DotDotDot)) {
let end = try!(self.parse_pat_range_end());
pat = PatRange(begin, end);
} else {
pat = PatLit(begin);
}
}
}
}
let hi = self.last_span.hi;
Ok(P(ast::Pat {
id: ast::DUMMY_NODE_ID,
node: pat,
span: mk_sp(lo, hi),
}))
}
    /// Parse `ident` or `ident @ pat`.
    /// Used by the copy foo and ref foo patterns to give a good
    /// error message when parsing mistakes like `ref foo(a, b)`.
fn parse_pat_ident(&mut self,
binding_mode: ast::BindingMode)
-> PResult<ast::Pat_> {
if !self.token.is_plain_ident() {
let span = self.span;
let tok_str = self.this_token_to_string();
return Err(self.span_fatal(span,
&format!("expected identifier, found `{}`", tok_str)))
}
let ident = try!(self.parse_ident());
let last_span = self.last_span;
let name = codemap::Spanned{span: last_span, node: ident};
        let sub = if try!(self.eat(&token::At)) {
Some(try!(self.parse_pat_nopanic()))
} else {
None
};
// just to be friendly, if they write something like
// ref Some(i)
// we end up here with ( as the current token. This shortly
// leads to a parse error. Note that if there is no explicit
// binding mode then we do not end up here, because the lookahead
// will direct us over to parse_enum_variant()
if self.token == token::OpenDelim(token::Paren) {
let last_span = self.last_span;
return Err(self.span_fatal(
last_span,
"expected identifier, found enum pattern"))
}
Ok(PatIdent(binding_mode, name, sub))
}
/// Parse a local variable declaration
fn parse_local(&mut self) -> PResult<P<Local>> {
let lo = self.span.lo;
let pat = try!(self.parse_pat_nopanic());
let mut ty = None;
        if try!(self.eat(&token::Colon)) {
ty = Some(try!(self.parse_ty_sum()));
}
let init = try!(self.parse_initializer());
Ok(P(ast::Local {
ty: ty,
pat: pat,
init: init,
id: ast::DUMMY_NODE_ID,
span: mk_sp(lo, self.last_span.hi),
source: LocalLet,
}))
}
/// Parse a "let" stmt
fn parse_let(&mut self) -> PResult<P<Decl>> {
let lo = self.span.lo;
let local = try!(self.parse_local());
Ok(P(spanned(lo, self.last_span.hi, DeclLocal(local))))
}
/// Parse a structure field
fn parse_name_and_ty(&mut self, pr: Visibility,
attrs: Vec<Attribute> ) -> PResult<StructField> {
let lo = self.span.lo;
if !self.token.is_plain_ident() {
return Err(self.fatal("expected ident"));
}
let name = try!(self.parse_ident());
try!(self.expect(&token::Colon));
let ty = try!(self.parse_ty_sum());
Ok(spanned(lo, self.last_span.hi, ast::StructField_ {
kind: NamedField(name, pr),
id: ast::DUMMY_NODE_ID,
ty: ty,
attrs: attrs,
}))
}
/// Emit an expected item after attributes error.
fn expected_item_err(&self, attrs: &[Attribute]) {
let message = match attrs.last() {
Some(&Attribute { node: ast::Attribute_ { is_sugared_doc: true, .. }, .. }) => {
"expected item after doc comment"
}
_ => "expected item after attributes",
};
self.span_err(self.last_span, message);
}
    /// Parse a statement; may include a declaration.
pub fn parse_stmt_nopanic(&mut self) -> PResult<Option<P<Stmt>>> {
Ok(try!(self.parse_stmt_()).map(P))
}
fn parse_stmt_(&mut self) -> PResult<Option<Stmt>> {
maybe_whole!(Some deref self, NtStmt);
fn check_expected_item(p: &mut Parser, attrs: &[Attribute]) {
// If we have attributes then we should have an item
if !attrs.is_empty() {
p.expected_item_err(attrs);
}
}
let attrs = self.parse_outer_attributes();
let lo = self.span.lo;
Ok(Some(if self.check_keyword(keywords::Let) {
check_expected_item(self, &attrs);
try!(self.expect_keyword(keywords::Let));
let decl = try!(self.parse_let());
spanned(lo, decl.span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID))
} else if self.token.is_ident()
&& !self.token.is_any_keyword()
&& self.look_ahead(1, |t| *t == token::Not) {
// it's a macro invocation:
check_expected_item(self, &attrs);
// Potential trouble: if we allow macros with paths instead of
// idents, we'd need to look ahead past the whole path here...
let pth = try!(self.parse_path(NoTypesAllowed));
try!(self.bump());
let id = match self.token {
token::OpenDelim(_) => token::special_idents::invalid, // no special identifier
_ => try!(self.parse_ident()),
};
// check that we're pointing at delimiters (need to check
// again after the `if`, because of `parse_ident`
// consuming more tokens).
let delim = match self.token {
token::OpenDelim(delim) => delim,
_ => {
// we only expect an ident if we didn't parse one
// above.
let ident_str = if id.name == token::special_idents::invalid.name {
"identifier, "
} else {
""
};
let tok_str = self.this_token_to_string();
return Err(self.fatal(&format!("expected {}`(` or `{{`, found `{}`",
ident_str,
tok_str)))
},
};
let tts = try!(self.parse_unspanned_seq(
&token::OpenDelim(delim),
&token::CloseDelim(delim),
seq_sep_none(),
|p| p.parse_token_tree()
));
let hi = self.last_span.hi;
let style = if delim == token::Brace {
MacStmtWithBraces
} else {
MacStmtWithoutBraces
};
if id.name == token::special_idents::invalid.name {
spanned(lo, hi,
StmtMac(P(spanned(lo,
hi,
MacInvocTT(pth, tts, EMPTY_CTXT))),
style))
} else {
            // if the invocation carries an ident (e.g. `foo! bar (...)`), it's definitely an item
//
// Require a semicolon or braces.
if style != MacStmtWithBraces {
if !try!(self.eat(&token::Semi) ){
let last_span = self.last_span;
self.span_err(last_span,
"macros that expand to items must \
either be surrounded with braces or \
followed by a semicolon");
}
}
spanned(lo, hi, StmtDecl(
P(spanned(lo, hi, DeclItem(
self.mk_item(
lo, hi, id /*id is good here*/,
ItemMac(spanned(lo, hi, MacInvocTT(pth, tts, EMPTY_CTXT))),
Inherited, Vec::new(/*no attrs*/))))),
ast::DUMMY_NODE_ID))
}
} else {
match try!(self.parse_item_(attrs, false)) {
Some(i) => {
let hi = i.span.hi;
let decl = P(spanned(lo, hi, DeclItem(i)));
spanned(lo, hi, StmtDecl(decl, ast::DUMMY_NODE_ID))
}
None => {
// Do not attempt to parse an expression if we're done here.
if self.token == token::Semi {
try!(self.bump());
return Ok(None);
}
if self.token == token::CloseDelim(token::Brace) {
return Ok(None);
}
// Remainder are line-expr stmts.
let e = try!(self.parse_expr_res(RESTRICTION_STMT_EXPR));
spanned(lo, e.span.hi, StmtExpr(e, ast::DUMMY_NODE_ID))
}
}
}))
}
    /// Under the current restrictions, is this expression already a complete
    /// statement on its own (i.e. no semicolon is required)?
fn expr_is_complete(&mut self, e: &Expr) -> bool {
self.restrictions.contains(RESTRICTION_STMT_EXPR) &&
!classify::expr_requires_semi_to_be_stmt(e)
}
/// Parse a block. No inner attrs are allowed.
pub fn parse_block(&mut self) -> PResult<P<Block>> {
maybe_whole!(no_clone self, NtBlock);
let lo = self.span.lo;
        if !try!(self.eat(&token::OpenDelim(token::Brace))) {
let sp = self.span;
let tok = self.this_token_to_string();
return Err(self.span_fatal_help(sp,
&format!("expected `{{`, found `{}`", tok),
"place this code inside a block"));
}
self.parse_block_tail(lo, DefaultBlock)
}
/// Parse a block. Inner attrs are allowed.
fn parse_inner_attrs_and_block(&mut self) -> PResult<(Vec<Attribute>, P<Block>)> {
maybe_whole!(pair_empty self, NtBlock);
let lo = self.span.lo;
try!(self.expect(&token::OpenDelim(token::Brace)));
Ok((self.parse_inner_attributes(),
try!(self.parse_block_tail(lo, DefaultBlock))))
}
/// Parse the rest of a block expression or function body
/// Precondition: already parsed the '{'.
fn parse_block_tail(&mut self, lo: BytePos, s: BlockCheckMode) -> PResult<P<Block>> {
let mut stmts = vec![];
let mut expr = None;
while !try!(self.eat(&token::CloseDelim(token::Brace))) {
let Spanned {node, span} = if let Some(s) = try!(self.parse_stmt_()) {
s
} else {
// Found only `;` or `}`.
continue;
};
match node {
StmtExpr(e, _) => {
try!(self.handle_expression_like_statement(e, span, &mut stmts, &mut expr));
}
StmtMac(mac, MacStmtWithoutBraces) => {
// statement macro without braces; might be an
// expr depending on whether a semicolon follows
match self.token {
token::Semi => {
stmts.push(P(Spanned {
node: StmtMac(mac, MacStmtWithSemicolon),
span: mk_sp(span.lo, self.span.hi),
}));
try!(self.bump());
}
_ => {
let e = self.mk_mac_expr(span.lo, span.hi,
mac.and_then(|m| m.node));
let e = try!(self.parse_dot_or_call_expr_with(e));
let e = try!(self.parse_more_binops(e, 0));
let e = try!(self.parse_assign_expr_with(e));
try!(self.handle_expression_like_statement(
e,
span,
&mut stmts,
&mut expr));
}
}
}
StmtMac(m, style) => {
// statement macro; might be an expr
match self.token {
token::Semi => {
stmts.push(P(Spanned {
node: StmtMac(m, MacStmtWithSemicolon),
span: mk_sp(span.lo, self.span.hi),
}));
try!(self.bump());
}
token::CloseDelim(token::Brace) => {
// if a block ends in `m!(arg)` without
// a `;`, it must be an expr
expr = Some(self.mk_mac_expr(span.lo, span.hi,
m.and_then(|x| x.node)));
}
_ => {
stmts.push(P(Spanned {
node: StmtMac(m, style),
span: span
}));
}
}
}
_ => { // all other kinds of statements:
let mut hi = span.hi;
if classify::stmt_ends_with_semi(&node) {
try!(self.commit_stmt_expecting(token::Semi));
hi = self.last_span.hi;
}
stmts.push(P(Spanned {
node: node,
span: mk_sp(span.lo, hi)
}));
}
}
}
Ok(P(ast::Block {
stmts: stmts,
expr: expr,
id: ast::DUMMY_NODE_ID,
rules: s,
span: mk_sp(lo, self.last_span.hi),
}))
}
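    /// Finish a statement that began as an expression: a following `;` makes
    /// it a `StmtSemi`, a closing `}` makes it the block's trailing
    /// expression, and anything else leaves it as a plain `StmtExpr`.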
fn handle_expression_like_statement(
&mut self,
e: P<Expr>,
span: Span,
stmts: &mut Vec<P<Stmt>>,
last_block_expr: &mut Option<P<Expr>>) -> PResult<()> {
// expression without semicolon
if classify::expr_requires_semi_to_be_stmt(&*e) {
// Just check for errors and recover; do not eat semicolon yet.
try!(self.commit_stmt(&[],
&[token::Semi, token::CloseDelim(token::Brace)]));
}
match self.token {
token::Semi => {
try!(self.bump());
let span_with_semi = Span {
lo: span.lo,
hi: self.last_span.hi,
expn_id: span.expn_id,
};
stmts.push(P(Spanned {
node: StmtSemi(e, ast::DUMMY_NODE_ID),
span: span_with_semi,
}));
}
token::CloseDelim(token::Brace) => *last_block_expr = Some(e),
_ => {
stmts.push(P(Spanned {
node: StmtExpr(e, ast::DUMMY_NODE_ID),
span: span
}));
}
}
Ok(())
}
    // Parses a sequence of bounds if a `:` is found;
    // otherwise returns an empty list.
fn parse_colon_then_ty_param_bounds(&mut self,
mode: BoundParsingMode)
-> PResult<OwnedSlice<TyParamBound>>
{
        if !try!(self.eat(&token::Colon)) {
Ok(OwnedSlice::empty())
} else {
self.parse_ty_param_bounds(mode)
}
}
// matches bounds = ( boundseq )?
// where boundseq = ( polybound + boundseq ) | polybound
// and polybound = ( 'for' '<' 'region '>' )? bound
// and bound = 'region | trait_ref
fn parse_ty_param_bounds(&mut self,
mode: BoundParsingMode)
-> PResult<OwnedSlice<TyParamBound>>
{
let mut result = vec!();
loop {
let question_span = self.span;
let ate_question = try!(self.eat(&token::Question));
match self.token {
token::Lifetime(lifetime) => {
if ate_question {
self.span_err(question_span,
"`?` may only modify trait bounds, not lifetime bounds");
}
result.push(RegionTyParamBound(ast::Lifetime {
id: ast::DUMMY_NODE_ID,
span: self.span,
name: lifetime.name
}));
try!(self.bump());
}
token::ModSep | token::Ident(..) => {
let poly_trait_ref = try!(self.parse_poly_trait_ref());
let modifier = if ate_question {
if mode == BoundParsingMode::Modified {
TraitBoundModifier::Maybe
} else {
self.span_err(question_span,
"unexpected `?`");
TraitBoundModifier::None
}
} else {
TraitBoundModifier::None
};
result.push(TraitTyParamBound(poly_trait_ref, modifier))
}
_ => break,
}
            if !try!(self.eat(&token::BinOp(token::Plus))) {
break;
}
}
return Ok(OwnedSlice::from_vec(result));
}
/// Matches typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?
fn parse_ty_param(&mut self) -> PResult<TyParam> {
let span = self.span;
let ident = try!(self.parse_ident());
let bounds = try!(self.parse_colon_then_ty_param_bounds(BoundParsingMode::Modified));
let default = if self.check(&token::Eq) {
try!(self.bump());
Some(try!(self.parse_ty_sum()))
} else {
None
};
Ok(TyParam {
ident: ident,
id: ast::DUMMY_NODE_ID,
bounds: bounds,
default: default,
span: span,
})
}
/// Parse a set of optional generic type parameter declarations. Where
/// clauses are not parsed here, and must be added later via
/// `parse_where_clause()`.
///
/// matches generics = ( ) | ( < > ) | ( < typaramseq ( , )? > ) | ( < lifetimes ( , )? > )
/// | ( < lifetimes , typaramseq ( , )? > )
/// where typaramseq = ( typaram ) | ( typaram , typaramseq )
pub fn parse_generics(&mut self) -> PResult<ast::Generics> {
if try!(self.eat(&token::Lt) ){
let lifetime_defs = try!(self.parse_lifetime_defs());
let mut seen_default = false;
let ty_params = try!(self.parse_seq_to_gt(Some(token::Comma), |p| {
try!(p.forbid_lifetime());
let ty_param = try!(p.parse_ty_param());
if ty_param.default.is_some() {
seen_default = true;
} else if seen_default {
let last_span = p.last_span;
p.span_err(last_span,
"type parameters with a default must be trailing");
}
Ok(ty_param)
}));
Ok(ast::Generics {
lifetimes: lifetime_defs,
ty_params: ty_params,
where_clause: WhereClause {
id: ast::DUMMY_NODE_ID,
predicates: Vec::new(),
}
})
} else {
Ok(ast_util::empty_generics())
}
}
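    /// Parse the generic arguments following a `<`: lifetimes first, then
    /// types, then associated type bindings of the form `ident = ty`.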
fn parse_generic_values_after_lt(&mut self) -> PResult<(Vec<ast::Lifetime>,
Vec<P<Ty>>,
Vec<P<TypeBinding>>)> {
let span_lo = self.span.lo;
let lifetimes = try!(self.parse_lifetimes(token::Comma));
let missing_comma = !lifetimes.is_empty() &&
!self.token.is_like_gt() &&
self.last_token
.as_ref().map_or(true,
|x| &**x != &token::Comma);
if missing_comma {
let msg = format!("expected `,` or `>` after lifetime \
name, found `{}`",
self.this_token_to_string());
self.span_err(self.span, &msg);
let span_hi = self.span.hi;
let span_hi = if self.parse_ty_nopanic().is_ok() {
self.span.hi
} else {
span_hi
};
let msg = format!("did you mean a single argument type &'a Type, \
or did you mean the comma-separated arguments \
'a, Type?");
self.span_note(mk_sp(span_lo, span_hi), &msg);
self.abort_if_errors()
}
// First parse types.
let (types, returned) = try!(self.parse_seq_to_gt_or_return(
Some(token::Comma),
|p| {
try!(p.forbid_lifetime());
if p.look_ahead(1, |t| t == &token::Eq) {
Ok(None)
} else {
Ok(Some(try!(p.parse_ty_sum())))
}
}
));
// If we found the `>`, don't continue.
if !returned {
return Ok((lifetimes, types.into_vec(), Vec::new()));
}
// Then parse type bindings.
let bindings = try!(self.parse_seq_to_gt(
Some(token::Comma),
|p| {
try!(p.forbid_lifetime());
let lo = p.span.lo;
let ident = try!(p.parse_ident());
let found_eq = try!(p.eat(&token::Eq));
if !found_eq {
let span = p.span;
                    p.span_warn(span, "expected `=` after the type binding name");
}
let ty = try!(p.parse_ty_nopanic());
let hi = p.span.hi;
let span = mk_sp(lo, hi);
return Ok(P(TypeBinding{id: ast::DUMMY_NODE_ID,
ident: ident,
ty: ty,
span: span,
}));
}
));
Ok((lifetimes, types.into_vec(), bindings.into_vec()))
}
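    /// Report a fatal error if the current token is a lifetime; used while
    /// parsing type parameters, which must come after any lifetimes.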
fn forbid_lifetime(&mut self) -> PResult<()> {
if self.token.is_lifetime() {
let span = self.span;
return Err(self.span_fatal(span, "lifetime parameters must be declared \
prior to type parameters"))
}
Ok(())
}
/// Parses an optional `where` clause and places it in `generics`.
///
    /// ```ignore
/// where T : Trait<U, V> + 'b, 'a : 'b
/// ```
pub fn parse_where_clause(&mut self) -> PResult<ast::WhereClause> {
let mut where_clause = WhereClause {
id: ast::DUMMY_NODE_ID,
predicates: Vec::new(),
};
if !try!(self.eat_keyword(keywords::Where)) {
return Ok(where_clause);
}
let mut parsed_something = false;
loop {
let lo = self.span.lo;
match self.token {
token::OpenDelim(token::Brace) => {
break
}
token::Lifetime(..) => {
let bounded_lifetime =
try!(self.parse_lifetime());
try!(self.eat(&token::Colon));
let bounds =
try!(self.parse_lifetimes(token::BinOp(token::Plus)));
let hi = self.last_span.hi;
let span = mk_sp(lo, hi);
where_clause.predicates.push(ast::WherePredicate::RegionPredicate(
ast::WhereRegionPredicate {
span: span,
lifetime: bounded_lifetime,
bounds: bounds
}
));
parsed_something = true;
}
_ => {
                    let bound_lifetimes = if try!(self.eat_keyword(keywords::For)) {
// Higher ranked constraint.
try!(self.expect(&token::Lt));
let lifetime_defs = try!(self.parse_lifetime_defs());
try!(self.expect_gt());
lifetime_defs
} else {
vec![]
};
let bounded_ty = try!(self.parse_ty_nopanic());
                    if try!(self.eat(&token::Colon)) {
let bounds = try!(self.parse_ty_param_bounds(BoundParsingMode::Bare));
let hi = self.last_span.hi;
let span = mk_sp(lo, hi);
if bounds.is_empty() {
self.span_err(span,
"each predicate in a `where` clause must have \
at least one bound in it");
}
where_clause.predicates.push(ast::WherePredicate::BoundPredicate(
ast::WhereBoundPredicate {
span: span,
bound_lifetimes: bound_lifetimes,
bounded_ty: bounded_ty,
bounds: bounds,
}));
parsed_something = true;
                    } else if try!(self.eat(&token::Eq)) {
// let ty = try!(self.parse_ty_nopanic());
let hi = self.last_span.hi;
let span = mk_sp(lo, hi);
// where_clause.predicates.push(
// ast::WherePredicate::EqPredicate(ast::WhereEqPredicate {
// id: ast::DUMMY_NODE_ID,
// span: span,
// path: panic!("NYI"), //bounded_ty,
// ty: ty,
// }));
// parsed_something = true;
// // FIXME(#18433)
self.span_err(span,
"equality constraints are not yet supported \
in where clauses (#20041)");
} else {
let last_span = self.last_span;
self.span_err(last_span,
"unexpected token in `where` clause");
}
}
};
            if !try!(self.eat(&token::Comma)) {
break
}
}
if !parsed_something {
let last_span = self.last_span;
self.span_err(last_span,
"a `where` clause must have at least one predicate \
in it");
}
Ok(where_clause)
}
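    /// Parse a parenthesized argument list, returning the arguments and a
    /// flag indicating whether the list ended with a variadic `...`.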
fn parse_fn_args(&mut self, named_args: bool, allow_variadic: bool)
-> PResult<(Vec<Arg> , bool)> {
let sp = self.span;
let mut args: Vec<Option<Arg>> =
try!(self.parse_unspanned_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
seq_sep_trailing_allowed(token::Comma),
|p| {
if p.token == token::DotDotDot {
try!(p.bump());
if allow_variadic {
if p.token != token::CloseDelim(token::Paren) {
let span = p.span;
return Err(p.span_fatal(span,
"`...` must be last in argument list for variadic function"))
}
} else {
let span = p.span;
return Err(p.span_fatal(span,
"only foreign functions are allowed to be variadic"))
}
Ok(None)
} else {
Ok(Some(try!(p.parse_arg_general(named_args))))
}
}
));
let variadic = match args.pop() {
Some(None) => true,
Some(x) => {
// Need to put back that last arg
args.push(x);
false
}
None => false
};
if variadic && args.is_empty() {
self.span_err(sp,
"variadic function must be declared with at least one named argument");
}
let args = args.into_iter().map(|x| x.unwrap()).collect();
Ok((args, variadic))
}
/// Parse the argument list and result type of a function declaration
pub fn parse_fn_decl(&mut self, allow_variadic: bool) -> PResult<P<FnDecl>> {
let (args, variadic) = try!(self.parse_fn_args(true, allow_variadic));
let ret_ty = try!(self.parse_ret_ty());
Ok(P(FnDecl {
inputs: args,
output: ret_ty,
variadic: variadic
}))
}
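    /// Check whether the current token is the plain identifier `self`.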
fn is_self_ident(&mut self) -> bool {
match self.token {
token::Ident(id, token::Plain) => id.name == special_idents::self_.name,
_ => false
}
}
fn expect_self_ident(&mut self) -> PResult<ast::Ident> {
match self.token {
token::Ident(id, token::Plain) if id.name == special_idents::self_.name => {
try!(self.bump());
Ok(id)
},
_ => {
let token_str = self.this_token_to_string();
return Err(self.fatal(&format!("expected `self`, found `{}`",
token_str)))
}
}
}
fn is_self_type_ident(&mut self) -> bool {
match self.token {
token::Ident(id, token::Plain) => id.name == special_idents::type_self.name,
_ => false
}
}
fn expect_self_type_ident(&mut self) -> PResult<ast::Ident> {
match self.token {
token::Ident(id, token::Plain) if id.name == special_idents::type_self.name => {
try!(self.bump());
Ok(id)
},
_ => {
let token_str = self.this_token_to_string();
Err(self.fatal(&format!("expected `Self`, found `{}`",
token_str)))
}
}
}
/// Parse the argument list and result type of a function
/// that may have a self type.
fn parse_fn_decl_with_self<F>(&mut self,
parse_arg_fn: F) -> PResult<(ExplicitSelf, P<FnDecl>)> where
F: FnMut(&mut Parser) -> PResult<Arg>,
{
fn maybe_parse_borrowed_explicit_self(this: &mut Parser)
-> PResult<ast::ExplicitSelf_> {
// The following things are possible to see here:
//
            //     fn(&self)
            //     fn(&mut self)
// fn(&'lt self)
// fn(&'lt mut self)
//
// We already know that the current token is `&`.
if this.look_ahead(1, |t| t.is_keyword(keywords::SelfValue)) {
try!(this.bump());
Ok(SelfRegion(None, MutImmutable, try!(this.expect_self_ident())))
} else if this.look_ahead(1, |t| t.is_mutability()) &&
this.look_ahead(2, |t| t.is_keyword(keywords::SelfValue)) {
try!(this.bump());
let mutability = try!(this.parse_mutability());
Ok(SelfRegion(None, mutability, try!(this.expect_self_ident())))
} else if this.look_ahead(1, |t| t.is_lifetime()) &&
this.look_ahead(2, |t| t.is_keyword(keywords::SelfValue)) {
try!(this.bump());
let lifetime = try!(this.parse_lifetime());
Ok(SelfRegion(Some(lifetime), MutImmutable, try!(this.expect_self_ident())))
} else if this.look_ahead(1, |t| t.is_lifetime()) &&
this.look_ahead(2, |t| t.is_mutability()) &&
this.look_ahead(3, |t| t.is_keyword(keywords::SelfValue)) {
try!(this.bump());
let lifetime = try!(this.parse_lifetime());
let mutability = try!(this.parse_mutability());
Ok(SelfRegion(Some(lifetime), mutability, try!(this.expect_self_ident())))
} else {
Ok(SelfStatic)
}
}
try!(self.expect(&token::OpenDelim(token::Paren)));
// A bit of complexity and lookahead is needed here in order to be
// backwards compatible.
let lo = self.span.lo;
let mut self_ident_lo = self.span.lo;
let mut self_ident_hi = self.span.hi;
let mut mutbl_self = MutImmutable;
let explicit_self = match self.token {
token::BinOp(token::And) => {
let eself = try!(maybe_parse_borrowed_explicit_self(self));
self_ident_lo = self.last_span.lo;
self_ident_hi = self.last_span.hi;
eself
}
token::BinOp(token::Star) => {
// Possibly "*self" or "*mut self" -- not supported. Try to avoid
// emitting cryptic "unexpected token" errors.
try!(self.bump());
let _mutability = if self.token.is_mutability() {
try!(self.parse_mutability())
} else {
MutImmutable
};
if self.is_self_ident() {
let span = self.span;
self.span_err(span, "cannot pass self by unsafe pointer");
try!(self.bump());
}
// error case, making bogus self ident:
SelfValue(special_idents::self_)
}
token::Ident(..) => {
if self.is_self_ident() {
let self_ident = try!(self.expect_self_ident());
// Determine whether this is the fully explicit form, `self:
// TYPE`.
if try!(self.eat(&token::Colon) ){
SelfExplicit(try!(self.parse_ty_sum()), self_ident)
} else {
SelfValue(self_ident)
}
} else if self.token.is_mutability() &&
self.look_ahead(1, |t| t.is_keyword(keywords::SelfValue)) {
mutbl_self = try!(self.parse_mutability());
let self_ident = try!(self.expect_self_ident());
// Determine whether this is the fully explicit form,
// `self: TYPE`.
if try!(self.eat(&token::Colon) ){
SelfExplicit(try!(self.parse_ty_sum()), self_ident)
} else {
SelfValue(self_ident)
}
} else {
SelfStatic
}
}
_ => SelfStatic,
};
let explicit_self_sp = mk_sp(self_ident_lo, self_ident_hi);
// shared fall-through for the three cases below. borrowing prevents simply
// writing this as a closure
macro_rules! parse_remaining_arguments {
($self_id:ident) =>
{
// If we parsed a self type, expect a comma before the argument list.
match self.token {
token::Comma => {
try!(self.bump());
let sep = seq_sep_trailing_allowed(token::Comma);
let mut fn_inputs = try!(self.parse_seq_to_before_end(
&token::CloseDelim(token::Paren),
sep,
parse_arg_fn
));
fn_inputs.insert(0, Arg::new_self(explicit_self_sp, mutbl_self, $self_id));
fn_inputs
}
token::CloseDelim(token::Paren) => {
vec!(Arg::new_self(explicit_self_sp, mutbl_self, $self_id))
}
_ => {
let token_str = self.this_token_to_string();
return Err(self.fatal(&format!("expected `,` or `)`, found `{}`",
token_str)))
}
}
}
}
let fn_inputs = match explicit_self {
SelfStatic => {
let sep = seq_sep_trailing_allowed(token::Comma);
try!(self.parse_seq_to_before_end(&token::CloseDelim(token::Paren),
sep, parse_arg_fn))
}
SelfValue(id) => parse_remaining_arguments!(id),
SelfRegion(_,_,id) => parse_remaining_arguments!(id),
SelfExplicit(_,id) => parse_remaining_arguments!(id),
};
try!(self.expect(&token::CloseDelim(token::Paren)));
let hi = self.span.hi;
let ret_ty = try!(self.parse_ret_ty());
let fn_decl = P(FnDecl {
inputs: fn_inputs,
output: ret_ty,
variadic: false
});
Ok((spanned(lo, hi, explicit_self), fn_decl))
}
// parse the |arg, arg| header on a lambda
fn parse_fn_block_decl(&mut self) -> PResult<P<FnDecl>> {
let inputs_captures = {
if try!(self.eat(&token::OrOr) ){
Vec::new()
} else {
try!(self.expect(&token::BinOp(token::Or)));
try!(self.parse_obsolete_closure_kind());
let args = try!(self.parse_seq_to_before_end(
&token::BinOp(token::Or),
seq_sep_trailing_allowed(token::Comma),
|p| p.parse_fn_block_arg()
));
try!(self.bump());
args
}
};
let output = try!(self.parse_ret_ty());
Ok(P(FnDecl {
inputs: inputs_captures,
output: output,
variadic: false
}))
}
/// Parse the name and optional generic types of a function header.
fn parse_fn_header(&mut self) -> PResult<(Ident, ast::Generics)> {
let id = try!(self.parse_ident());
let generics = try!(self.parse_generics());
Ok((id, generics))
}
fn mk_item(&mut self, lo: BytePos, hi: BytePos, ident: Ident,
node: Item_, vis: Visibility,
attrs: Vec<Attribute>) -> P<Item> {
P(Item {
ident: ident,
attrs: attrs,
id: ast::DUMMY_NODE_ID,
node: node,
vis: vis,
span: mk_sp(lo, hi)
})
}
/// Parse an item-position function declaration.
fn parse_item_fn(&mut self, unsafety: Unsafety, abi: abi::Abi) -> PResult<ItemInfo> {
let (ident, mut generics) = try!(self.parse_fn_header());
let decl = try!(self.parse_fn_decl(false));
generics.where_clause = try!(self.parse_where_clause());
let (inner_attrs, body) = try!(self.parse_inner_attrs_and_block());
Ok((ident, ItemFn(decl, unsafety, abi, generics, body), Some(inner_attrs)))
}
/// Parse an impl item.
pub fn parse_impl_item(&mut self) -> PResult<P<ImplItem>> {
maybe_whole!(no_clone self, NtImplItem);
let mut attrs = self.parse_outer_attributes();
let lo = self.span.lo;
let vis = try!(self.parse_visibility());
let (name, node) = if try!(self.eat_keyword(keywords::Type)) {
let name = try!(self.parse_ident());
try!(self.expect(&token::Eq));
let typ = try!(self.parse_ty_sum());
try!(self.expect(&token::Semi));
(name, TypeImplItem(typ))
} else if try!(self.eat_keyword(keywords::Const)) {
let name = try!(self.parse_ident());
try!(self.expect(&token::Colon));
let typ = try!(self.parse_ty_sum());
try!(self.expect(&token::Eq));
let expr = try!(self.parse_expr_nopanic());
try!(self.commit_expr_expecting(&expr, token::Semi));
(name, ConstImplItem(typ, expr))
} else {
let (name, inner_attrs, node) = try!(self.parse_impl_method(vis));
attrs.extend(inner_attrs.into_iter());
(name, node)
};
Ok(P(ImplItem {
id: ast::DUMMY_NODE_ID,
span: mk_sp(lo, self.last_span.hi),
ident: name,
vis: vis,
attrs: attrs,
node: node
}))
}
fn complain_if_pub_macro(&mut self, visa: Visibility, span: Span) {
match visa {
Public => {
self.span_err(span, "can't qualify macro invocation with `pub`");
self.fileline_help(span, "try adjusting the macro to put `pub` inside \
the invocation");
}
Inherited => (),
}
}
/// Parse a method or a macro invocation in a trait impl.
fn parse_impl_method(&mut self, vis: Visibility)
-> PResult<(Ident, Vec<ast::Attribute>, ast::ImplItem_)> {
// code copied from parse_macro_use_or_failure... abstraction!
if !self.token.is_any_keyword()
&& self.look_ahead(1, |t| *t == token::Not)
&& (self.look_ahead(2, |t| *t == token::OpenDelim(token::Paren))
|| self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))) {
// method macro.
let last_span = self.last_span;
self.complain_if_pub_macro(vis, last_span);
let pth = try!(self.parse_path(NoTypesAllowed));
try!(self.expect(&token::Not));
// eat a matched-delimiter token tree:
let delim = try!(self.expect_open_delim());
let tts = try!(self.parse_seq_to_end(&token::CloseDelim(delim),
seq_sep_none(),
|p| p.parse_token_tree()));
let m_ = ast::MacInvocTT(pth, tts, EMPTY_CTXT);
let m: ast::Mac = codemap::Spanned { node: m_,
span: mk_sp(self.span.lo,
self.span.hi) };
if delim != token::Brace {
try!(self.expect(&token::Semi))
}
Ok((token::special_idents::invalid, vec![], ast::MacImplItem(m)))
} else {
let unsafety = try!(self.parse_unsafety());
let abi = if try!(self.eat_keyword(keywords::Extern)) {
try!(self.parse_opt_abi()).unwrap_or(abi::C)
} else {
abi::Rust
};
try!(self.expect_keyword(keywords::Fn));
let ident = try!(self.parse_ident());
let mut generics = try!(self.parse_generics());
let (explicit_self, decl) = try!(self.parse_fn_decl_with_self(|p| {
p.parse_arg()
}));
generics.where_clause = try!(self.parse_where_clause());
let (inner_attrs, body) = try!(self.parse_inner_attrs_and_block());
Ok((ident, inner_attrs, MethodImplItem(ast::MethodSig {
generics: generics,
abi: abi,
explicit_self: explicit_self,
unsafety: unsafety,
decl: decl
}, body)))
}
}
/// Parse trait Foo { ... }
fn parse_item_trait(&mut self, unsafety: Unsafety) -> PResult<ItemInfo> {
let ident = try!(self.parse_ident());
let mut tps = try!(self.parse_generics());
// Parse supertrait bounds.
let bounds = try!(self.parse_colon_then_ty_param_bounds(BoundParsingMode::Bare));
tps.where_clause = try!(self.parse_where_clause());
let meths = try!(self.parse_trait_items());
Ok((ident, ItemTrait(unsafety, tps, bounds, meths), None))
}
    /// Parses item implementation variants:
/// impl<T> Foo { ... }
/// impl<T> ToString for &'static T { ... }
/// impl Send for .. {}
fn parse_item_impl(&mut self, unsafety: ast::Unsafety) -> PResult<ItemInfo> {
let impl_span = self.span;
// First, parse type parameters if necessary.
let mut generics = try!(self.parse_generics());
// Special case: if the next identifier that follows is '(', don't
// allow this to be parsed as a trait.
let could_be_trait = self.token != token::OpenDelim(token::Paren);
let neg_span = self.span;
let polarity = if try!(self.eat(&token::Not) ){
ast::ImplPolarity::Negative
} else {
ast::ImplPolarity::Positive
};
// Parse the trait.
let mut ty = try!(self.parse_ty_sum());
// Parse traits, if necessary.
let opt_trait = if could_be_trait && try!(self.eat_keyword(keywords::For) ){
// New-style trait. Reinterpret the type as a trait.
match ty.node {
TyPath(None, ref path) => {
Some(TraitRef {
path: (*path).clone(),
ref_id: ty.id,
})
}
_ => {
self.span_err(ty.span, "not a trait");
None
}
}
} else {
match polarity {
ast::ImplPolarity::Negative => {
// This is a negated type implementation
// `impl !MyType {}`, which is not allowed.
self.span_err(neg_span, "inherent implementation can't be negated");
},
_ => {}
}
None
};
if try!(self.eat(&token::DotDot) ){
if generics.is_parameterized() {
self.span_err(impl_span, "default trait implementations are not \
                                      allowed to have generics");
}
try!(self.expect(&token::OpenDelim(token::Brace)));
try!(self.expect(&token::CloseDelim(token::Brace)));
Ok((ast_util::impl_pretty_name(&opt_trait, None),
ItemDefaultImpl(unsafety, opt_trait.unwrap()), None))
} else {
if opt_trait.is_some() {
ty = try!(self.parse_ty_sum());
}
generics.where_clause = try!(self.parse_where_clause());
try!(self.expect(&token::OpenDelim(token::Brace)));
let attrs = self.parse_inner_attributes();
let mut impl_items = vec![];
while !try!(self.eat(&token::CloseDelim(token::Brace))) {
impl_items.push(try!(self.parse_impl_item()));
}
Ok((ast_util::impl_pretty_name(&opt_trait, Some(&*ty)),
ItemImpl(unsafety, polarity, generics, opt_trait, ty, impl_items),
Some(attrs)))
}
}
/// Parse a::B<String,i32>
fn parse_trait_ref(&mut self) -> PResult<TraitRef> {
Ok(ast::TraitRef {
path: try!(self.parse_path(LifetimeAndTypesWithoutColons)),
ref_id: ast::DUMMY_NODE_ID,
})
}
fn parse_late_bound_lifetime_defs(&mut self) -> PResult<Vec<ast::LifetimeDef>> {
if try!(self.eat_keyword(keywords::For) ){
try!(self.expect(&token::Lt));
let lifetime_defs = try!(self.parse_lifetime_defs());
try!(self.expect_gt());
Ok(lifetime_defs)
} else {
Ok(Vec::new())
}
}
/// Parse for<'l> a::B<String,i32>
fn parse_poly_trait_ref(&mut self) -> PResult<PolyTraitRef> {
let lo = self.span.lo;
let lifetime_defs = try!(self.parse_late_bound_lifetime_defs());
Ok(ast::PolyTraitRef {
bound_lifetimes: lifetime_defs,
trait_ref: try!(self.parse_trait_ref()),
span: mk_sp(lo, self.last_span.hi),
})
}
/// Parse struct Foo { ... }
fn parse_item_struct(&mut self) -> PResult<ItemInfo> {
let class_name = try!(self.parse_ident());
let mut generics = try!(self.parse_generics());
if try!(self.eat(&token::Colon) ){
let ty = try!(self.parse_ty_sum());
self.span_err(ty.span, "`virtual` structs have been removed from the language");
}
// There is a special case worth noting here, as reported in issue #17904.
// If we are parsing a tuple struct it is the case that the where clause
// should follow the field list. Like so:
//
// struct Foo<T>(T) where T: Copy;
//
// If we are parsing a normal record-style struct it is the case
// that the where clause comes before the body, and after the generics.
// So if we look ahead and see a brace or a where-clause we begin
// parsing a record style struct.
//
// Otherwise if we look ahead and see a paren we parse a tuple-style
// struct.
let (fields, ctor_id) = if self.token.is_keyword(keywords::Where) {
generics.where_clause = try!(self.parse_where_clause());
if try!(self.eat(&token::Semi)) {
// If we see a: `struct Foo<T> where T: Copy;` style decl.
(Vec::new(), Some(ast::DUMMY_NODE_ID))
} else {
// If we see: `struct Foo<T> where T: Copy { ... }`
(try!(self.parse_record_struct_body(&class_name)), None)
}
// No `where` so: `struct Foo<T>;`
} else if try!(self.eat(&token::Semi) ){
(Vec::new(), Some(ast::DUMMY_NODE_ID))
// Record-style struct definition
} else if self.token == token::OpenDelim(token::Brace) {
let fields = try!(self.parse_record_struct_body(&class_name));
(fields, None)
// Tuple-style struct definition with optional where-clause.
} else {
let fields = try!(self.parse_tuple_struct_body(&class_name, &mut generics));
(fields, Some(ast::DUMMY_NODE_ID))
};
Ok((class_name,
ItemStruct(P(ast::StructDef {
fields: fields,
ctor_id: ctor_id,
}), generics),
None))
}
pub fn parse_record_struct_body(&mut self,
class_name: &ast::Ident) -> PResult<Vec<StructField>> {
let mut fields = Vec::new();
if try!(self.eat(&token::OpenDelim(token::Brace)) ){
while self.token != token::CloseDelim(token::Brace) {
fields.push(try!(self.parse_struct_decl_field(true)));
}
if fields.is_empty() {
return Err(self.fatal(&format!("unit-like struct definition should be \
written as `struct {};`",
token::get_ident(class_name.clone()))));
}
try!(self.bump());
} else {
let token_str = self.this_token_to_string();
return Err(self.fatal(&format!("expected `where`, or `{}` after struct \
name, found `{}`", "{",
token_str)));
}
Ok(fields)
}
pub fn parse_tuple_struct_body(&mut self,
class_name: &ast::Ident,
generics: &mut ast::Generics)
-> PResult<Vec<StructField>> {
// This is the case where we find `struct Foo<T>(T) where T: Copy;`
if self.check(&token::OpenDelim(token::Paren)) {
let fields = try!(self.parse_unspanned_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
seq_sep_trailing_allowed(token::Comma),
|p| {
let attrs = p.parse_outer_attributes();
let lo = p.span.lo;
let struct_field_ = ast::StructField_ {
kind: UnnamedField(try!(p.parse_visibility())),
id: ast::DUMMY_NODE_ID,
ty: try!(p.parse_ty_sum()),
attrs: attrs,
};
Ok(spanned(lo, p.span.hi, struct_field_))
}));
if fields.is_empty() {
return Err(self.fatal(&format!("unit-like struct definition should be \
written as `struct {};`",
token::get_ident(class_name.clone()))));
}
generics.where_clause = try!(self.parse_where_clause());
try!(self.expect(&token::Semi));
Ok(fields)
// This is the case where we just see struct Foo<T> where T: Copy;
} else if self.token.is_keyword(keywords::Where) {
generics.where_clause = try!(self.parse_where_clause());
try!(self.expect(&token::Semi));
Ok(Vec::new())
// This case is where we see: `struct Foo<T>;`
} else {
let token_str = self.this_token_to_string();
Err(self.fatal(&format!("expected `where`, `{}`, `(`, or `;` after struct \
name, found `{}`", "{", token_str)))
}
}
/// Parse a structure field declaration
pub fn parse_single_struct_field(&mut self,
vis: Visibility,
attrs: Vec<Attribute> )
-> PResult<StructField> {
let a_var = try!(self.parse_name_and_ty(vis, attrs));
match self.token {
token::Comma => {
try!(self.bump());
}
token::CloseDelim(token::Brace) => {}
_ => {
let span = self.span;
let token_str = self.this_token_to_string();
return Err(self.span_fatal_help(span,
&format!("expected `,`, or `}}`, found `{}`",
token_str),
"struct fields should be separated by commas"))
}
}
Ok(a_var)
}
/// Parse an element of a struct definition
fn parse_struct_decl_field(&mut self, allow_pub: bool) -> PResult<StructField> {
let attrs = self.parse_outer_attributes();
if try!(self.eat_keyword(keywords::Pub) ){
if !allow_pub {
let span = self.last_span;
self.span_err(span, "`pub` is not allowed here");
}
return self.parse_single_struct_field(Public, attrs);
}
return self.parse_single_struct_field(Inherited, attrs);
}
/// Parse visibility: PUB, PRIV, or nothing
fn parse_visibility(&mut self) -> PResult<Visibility> {
if try!(self.eat_keyword(keywords::Pub)) { Ok(Public) }
else { Ok(Inherited) }
}
/// Given a termination token, parse all of the items in a module
fn parse_mod_items(&mut self, term: &token::Token, inner_lo: BytePos) -> PResult<Mod> {
let mut items = vec![];
while let Some(item) = try!(self.parse_item_nopanic()) {
items.push(item);
}
if !try!(self.eat(term)) {
let token_str = self.this_token_to_string();
return Err(self.fatal(&format!("expected item, found `{}`", token_str)));
}
Ok(ast::Mod {
inner: mk_sp(inner_lo, self.span.lo),
items: items
})
}
fn parse_item_const(&mut self, m: Option<Mutability>) -> PResult<ItemInfo> {
let id = try!(self.parse_ident());
try!(self.expect(&token::Colon));
let ty = try!(self.parse_ty_sum());
try!(self.expect(&token::Eq));
let e = try!(self.parse_expr_nopanic());
try!(self.commit_expr_expecting(&*e, token::Semi));
let item = match m {
Some(m) => ItemStatic(ty, m, e),
None => ItemConst(ty, e),
};
Ok((id, item, None))
}
/// Parse a `mod <foo> { ... }` or `mod <foo>;` item
fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<ItemInfo> {
let id_span = self.span;
let id = try!(self.parse_ident());
if self.check(&token::Semi) {
try!(self.bump());
// This mod is in an external file. Let's go get it!
let (m, attrs) = try!(self.eval_src_mod(id, outer_attrs, id_span));
Ok((id, m, Some(attrs)))
} else {
self.push_mod_path(id, outer_attrs);
try!(self.expect(&token::OpenDelim(token::Brace)));
let mod_inner_lo = self.span.lo;
let old_owns_directory = self.owns_directory;
self.owns_directory = true;
let attrs = self.parse_inner_attributes();
let m = try!(self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo));
self.owns_directory = old_owns_directory;
self.pop_mod_path();
Ok((id, ItemMod(m), Some(attrs)))
}
}
fn push_mod_path(&mut self, id: Ident, attrs: &[Attribute]) {
let default_path = self.id_to_interned_str(id);
let file_path = match ::attr::first_attr_value_str_by_name(attrs,
"path") {
Some(d) => d,
None => default_path,
};
self.mod_path_stack.push(file_path)
}
fn pop_mod_path(&mut self) {
self.mod_path_stack.pop().unwrap();
}
/// Read a module from a source file.
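    /// For `mod foo;`, the file is resolved as either `foo.rs` or
    /// `foo/mod.rs` relative to the current module's directory (an
    /// illustrative summary of the lookup below, not original rustdoc);
    /// finding both, or neither, is a fatal error.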
fn eval_src_mod(&mut self,
id: ast::Ident,
outer_attrs: &[ast::Attribute],
id_sp: Span)
-> PResult<(ast::Item_, Vec<ast::Attribute> )> {
let mut prefix = PathBuf::from(&self.sess.span_diagnostic.cm
.span_to_filename(self.span));
prefix.pop();
let mut dir_path = prefix;
for part in &self.mod_path_stack {
dir_path.push(&**part);
}
let mod_string = token::get_ident(id);
let (file_path, owns_directory) = match ::attr::first_attr_value_str_by_name(
outer_attrs, "path") {
Some(d) => (dir_path.join(&*d), true),
None => {
let mod_name = mod_string.to_string();
let default_path_str = format!("{}.rs", mod_name);
let secondary_path_str = format!("{}/mod.rs", mod_name);
let default_path = dir_path.join(&default_path_str[..]);
let secondary_path = dir_path.join(&secondary_path_str[..]);
let default_exists = fs::metadata(&default_path).is_ok();
let secondary_exists = fs::metadata(&secondary_path).is_ok();
if !self.owns_directory {
self.span_err(id_sp,
"cannot declare a new module at this location");
let this_module = match self.mod_path_stack.last() {
Some(name) => name.to_string(),
None => self.root_module_name.as_ref().unwrap().clone(),
};
self.span_note(id_sp,
&format!("maybe move this module `{0}` \
to its own directory via \
`{0}/mod.rs`",
this_module));
if default_exists || secondary_exists {
self.span_note(id_sp,
&format!("... or maybe `use` the module \
`{}` instead of possibly \
redeclaring it",
mod_name));
}
self.abort_if_errors();
}
match (default_exists, secondary_exists) {
(true, false) => (default_path, false),
(false, true) => (secondary_path, true),
(false, false) => {
return Err(self.span_fatal_help(id_sp,
&format!("file not found for module `{}`",
mod_name),
&format!("name the file either {} or {} inside \
the directory {:?}",
default_path_str,
secondary_path_str,
dir_path.display())));
}
(true, true) => {
return Err(self.span_fatal_help(
id_sp,
&format!("file for module `{}` found at both {} \
and {}",
mod_name,
default_path_str,
secondary_path_str),
"delete or rename one of them to remove the ambiguity"));
}
}
}
};
self.eval_src_mod_from_path(file_path, owns_directory,
mod_string.to_string(), id_sp)
}
fn eval_src_mod_from_path(&mut self,
path: PathBuf,
owns_directory: bool,
name: String,
id_sp: Span) -> PResult<(ast::Item_, Vec<ast::Attribute> )> {
let mut included_mod_stack = self.sess.included_mod_stack.borrow_mut();
match included_mod_stack.iter().position(|p| *p == path) {
Some(i) => {
let mut err = String::from("circular modules: ");
let len = included_mod_stack.len();
for p in &included_mod_stack[i.. len] {
err.push_str(&p.to_string_lossy());
err.push_str(" -> ");
}
err.push_str(&path.to_string_lossy());
return Err(self.span_fatal(id_sp, &err[..]));
}
None => ()
}
included_mod_stack.push(path.clone());
drop(included_mod_stack);
let mut p0 =
new_sub_parser_from_file(self.sess,
self.cfg.clone(),
&path,
owns_directory,
Some(name),
id_sp);
let mod_inner_lo = p0.span.lo;
let mod_attrs = p0.parse_inner_attributes();
let m0 = try!(p0.parse_mod_items(&token::Eof, mod_inner_lo));
self.sess.included_mod_stack.borrow_mut().pop();
Ok((ast::ItemMod(m0), mod_attrs))
}
/// Parse a function declaration from a foreign module
fn parse_item_foreign_fn(&mut self, vis: ast::Visibility,
attrs: Vec<Attribute>) -> PResult<P<ForeignItem>> {
let lo = self.span.lo;
try!(self.expect_keyword(keywords::Fn));
let (ident, mut generics) = try!(self.parse_fn_header());
let decl = try!(self.parse_fn_decl(true));
generics.where_clause = try!(self.parse_where_clause());
let hi = self.span.hi;
try!(self.expect(&token::Semi));
Ok(P(ast::ForeignItem {
ident: ident,
attrs: attrs,
node: ForeignItemFn(decl, generics),
id: ast::DUMMY_NODE_ID,
span: mk_sp(lo, hi),
vis: vis
}))
}
/// Parse a static item from a foreign module
fn parse_item_foreign_static(&mut self, vis: ast::Visibility,
attrs: Vec<Attribute>) -> PResult<P<ForeignItem>> {
let lo = self.span.lo;
try!(self.expect_keyword(keywords::Static));
let mutbl = try!(self.eat_keyword(keywords::Mut));
let ident = try!(self.parse_ident());
try!(self.expect(&token::Colon));
let ty = try!(self.parse_ty_sum());
let hi = self.span.hi;
try!(self.expect(&token::Semi));
Ok(P(ForeignItem {
ident: ident,
attrs: attrs,
node: ForeignItemStatic(ty, mutbl),
id: ast::DUMMY_NODE_ID,
span: mk_sp(lo, hi),
vis: vis
}))
}
/// Parse extern crate links
///
/// # Examples
///
/// extern crate foo;
/// extern crate bar as foo;
fn parse_item_extern_crate(&mut self,
lo: BytePos,
visibility: Visibility,
attrs: Vec<Attribute>)
-> PResult<P<Item>> {
let crate_name = try!(self.parse_ident());
let (maybe_path, ident) = if try!(self.eat_keyword(keywords::As)) {
(Some(crate_name.name), try!(self.parse_ident()))
} else {
(None, crate_name)
};
try!(self.expect(&token::Semi));
let last_span = self.last_span;
Ok(self.mk_item(lo,
last_span.hi,
ident,
ItemExternCrate(maybe_path),
visibility,
attrs))
}
    /// Parse an `extern` block for a foreign module, with an optional ABI.
///
/// `extern` is expected to have been
/// consumed before calling this method
///
/// # Examples:
///
/// extern "C" {}
/// extern {}
fn parse_item_foreign_mod(&mut self,
lo: BytePos,
opt_abi: Option<abi::Abi>,
visibility: Visibility,
mut attrs: Vec<Attribute>)
-> PResult<P<Item>> {
try!(self.expect(&token::OpenDelim(token::Brace)));
let abi = opt_abi.unwrap_or(abi::C);
attrs.extend(self.parse_inner_attributes().into_iter());
let mut foreign_items = vec![];
while let Some(item) = try!(self.parse_foreign_item()) {
foreign_items.push(item);
}
try!(self.expect(&token::CloseDelim(token::Brace)));
let last_span = self.last_span;
let m = ast::ForeignMod {
abi: abi,
items: foreign_items
};
Ok(self.mk_item(lo,
last_span.hi,
special_idents::invalid,
ItemForeignMod(m),
visibility,
attrs))
}
/// Parse type Foo = Bar;
fn parse_item_type(&mut self) -> PResult<ItemInfo> {
let ident = try!(self.parse_ident());
let mut tps = try!(self.parse_generics());
tps.where_clause = try!(self.parse_where_clause());
try!(self.expect(&token::Eq));
let ty = try!(self.parse_ty_sum());
try!(self.expect(&token::Semi));
Ok((ident, ItemTy(ty, tps), None))
}
/// Parse a structure-like enum variant definition
/// this should probably be renamed or refactored...
fn parse_struct_def(&mut self) -> PResult<P<StructDef>> {
let mut fields: Vec<StructField> = Vec::new();
while self.token != token::CloseDelim(token::Brace) {
fields.push(try!(self.parse_struct_decl_field(false)));
}
try!(self.bump());
Ok(P(StructDef {
fields: fields,
ctor_id: None,
}))
}
/// Parse the part of an "enum" decl following the '{'
fn parse_enum_def(&mut self, _generics: &ast::Generics) -> PResult<EnumDef> {
let mut variants = Vec::new();
let mut all_nullary = true;
let mut any_disr = None;
while self.token != token::CloseDelim(token::Brace) {
let variant_attrs = self.parse_outer_attributes();
let vlo = self.span.lo;
let vis = try!(self.parse_visibility());
let ident;
let kind;
let mut args = Vec::new();
let mut disr_expr = None;
ident = try!(self.parse_ident());
if try!(self.eat(&token::OpenDelim(token::Brace)) ){
// Parse a struct variant.
all_nullary = false;
let start_span = self.span;
let struct_def = try!(self.parse_struct_def());
if struct_def.fields.is_empty() {
self.span_err(start_span,
&format!("unit-like struct variant should be written \
without braces, as `{},`",
token::get_ident(ident)));
}
kind = StructVariantKind(struct_def);
} else if self.check(&token::OpenDelim(token::Paren)) {
all_nullary = false;
let arg_tys = try!(self.parse_enum_variant_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
seq_sep_trailing_allowed(token::Comma),
|p| p.parse_ty_sum()
));
for ty in arg_tys {
args.push(ast::VariantArg {
ty: ty,
id: ast::DUMMY_NODE_ID,
});
}
kind = TupleVariantKind(args);
} else if try!(self.eat(&token::Eq) ){
disr_expr = Some(try!(self.parse_expr_nopanic()));
any_disr = disr_expr.as_ref().map(|expr| expr.span);
kind = TupleVariantKind(args);
} else {
kind = TupleVariantKind(Vec::new());
}
let vr = ast::Variant_ {
name: ident,
attrs: variant_attrs,
kind: kind,
id: ast::DUMMY_NODE_ID,
disr_expr: disr_expr,
vis: vis,
};
variants.push(P(spanned(vlo, self.last_span.hi, vr)));
if !try!(self.eat(&token::Comma)) { break; }
}
try!(self.expect(&token::CloseDelim(token::Brace)));
match any_disr {
Some(disr_span) if !all_nullary =>
self.span_err(disr_span,
"discriminator values can only be used with a c-like enum"),
_ => ()
}
Ok(ast::EnumDef { variants: variants })
}
/// Parse an "enum" declaration
fn parse_item_enum(&mut self) -> PResult<ItemInfo> {
let id = try!(self.parse_ident());
let mut generics = try!(self.parse_generics());
generics.where_clause = try!(self.parse_where_clause());
try!(self.expect(&token::OpenDelim(token::Brace)));
let enum_definition = try!(self.parse_enum_def(&generics));
Ok((id, ItemEnum(enum_definition, generics), None))
}
    /// Parses an optional string literal as an ABI spec on an extern type or
    /// module; the `extern` keyword itself must already have been consumed.
fn parse_opt_abi(&mut self) -> PResult<Option<abi::Abi>> {
match self.token {
token::Literal(token::Str_(s), suf) | token::Literal(token::StrRaw(s, _), suf) => {
let sp = self.span;
self.expect_no_suffix(sp, "ABI spec", suf);
try!(self.bump());
let the_string = s.as_str();
match abi::lookup(the_string) {
Some(abi) => Ok(Some(abi)),
None => {
let last_span = self.last_span;
self.span_err(
last_span,
&format!("illegal ABI: expected one of [{}], \
found `{}`",
abi::all_names().connect(", "),
the_string));
Ok(None)
}
}
}
_ => Ok(None),
}
}
/// Parse one of the items allowed by the flags.
/// NB: this function no longer parses the items inside an
/// extern crate.
fn parse_item_(&mut self, attrs: Vec<Attribute>,
macros_allowed: bool) -> PResult<Option<P<Item>>> {
let nt_item = match self.token {
token::Interpolated(token::NtItem(ref item)) => {
Some((**item).clone())
}
_ => None
};
match nt_item {
Some(mut item) => {
try!(self.bump());
let mut attrs = attrs;
mem::swap(&mut item.attrs, &mut attrs);
item.attrs.extend(attrs.into_iter());
return Ok(Some(P(item)));
}
None => {}
}
let lo = self.span.lo;
let visibility = try!(self.parse_visibility());
if try!(self.eat_keyword(keywords::Use) ){
// USE ITEM
let item_ = ItemUse(try!(self.parse_view_path()));
try!(self.expect(&token::Semi));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
token::special_idents::invalid,
item_,
visibility,
attrs);
return Ok(Some(item));
}
if try!(self.eat_keyword(keywords::Extern)) {
if try!(self.eat_keyword(keywords::Crate)) {
return Ok(Some(try!(self.parse_item_extern_crate(lo, visibility, attrs))));
}
let opt_abi = try!(self.parse_opt_abi());
if try!(self.eat_keyword(keywords::Fn) ){
// EXTERN FUNCTION ITEM
let abi = opt_abi.unwrap_or(abi::C);
let (ident, item_, extra_attrs) =
try!(self.parse_item_fn(Unsafety::Normal, abi));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
} else if self.check(&token::OpenDelim(token::Brace)) {
return Ok(Some(try!(self.parse_item_foreign_mod(lo, opt_abi, visibility, attrs))));
}
let span = self.span;
let token_str = self.this_token_to_string();
return Err(self.span_fatal(span,
&format!("expected `{}` or `fn`, found `{}`", "{",
token_str)))
}
if try!(self.eat_keyword_noexpect(keywords::Virtual) ){
let span = self.span;
self.span_err(span, "`virtual` structs have been removed from the language");
}
if try!(self.eat_keyword(keywords::Static) ){
// STATIC ITEM
let m = if try!(self.eat_keyword(keywords::Mut)) {MutMutable} else {MutImmutable};
let (ident, item_, extra_attrs) = try!(self.parse_item_const(Some(m)));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if try!(self.eat_keyword(keywords::Const) ){
// CONST ITEM
if try!(self.eat_keyword(keywords::Mut) ){
let last_span = self.last_span;
self.span_err(last_span, "const globals cannot be mutable");
self.fileline_help(last_span, "did you mean to declare a static?");
}
let (ident, item_, extra_attrs) = try!(self.parse_item_const(None));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.check_keyword(keywords::Unsafe) &&
self.look_ahead(1, |t| t.is_keyword(keywords::Trait))
{
// UNSAFE TRAIT ITEM
try!(self.expect_keyword(keywords::Unsafe));
try!(self.expect_keyword(keywords::Trait));
let (ident, item_, extra_attrs) =
try!(self.parse_item_trait(ast::Unsafety::Unsafe));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.check_keyword(keywords::Unsafe) &&
self.look_ahead(1, |t| t.is_keyword(keywords::Impl))
{
// IMPL ITEM
try!(self.expect_keyword(keywords::Unsafe));
try!(self.expect_keyword(keywords::Impl));
let (ident, item_, extra_attrs) = try!(self.parse_item_impl(ast::Unsafety::Unsafe));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.check_keyword(keywords::Fn) {
// FUNCTION ITEM
try!(self.bump());
let (ident, item_, extra_attrs) =
try!(self.parse_item_fn(Unsafety::Normal, abi::Rust));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.check_keyword(keywords::Unsafe)
&& self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace)) {
// UNSAFE FUNCTION ITEM
try!(self.bump());
let abi = if try!(self.eat_keyword(keywords::Extern) ){
try!(self.parse_opt_abi()).unwrap_or(abi::C)
} else {
abi::Rust
};
try!(self.expect_keyword(keywords::Fn));
let (ident, item_, extra_attrs) =
try!(self.parse_item_fn(Unsafety::Unsafe, abi));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if try!(self.eat_keyword(keywords::Mod) ){
// MODULE ITEM
let (ident, item_, extra_attrs) =
try!(self.parse_item_mod(&attrs[..]));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if try!(self.eat_keyword(keywords::Type) ){
// TYPE ITEM
let (ident, item_, extra_attrs) = try!(self.parse_item_type());
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if try!(self.eat_keyword(keywords::Enum) ){
// ENUM ITEM
let (ident, item_, extra_attrs) = try!(self.parse_item_enum());
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if try!(self.eat_keyword(keywords::Trait) ){
// TRAIT ITEM
let (ident, item_, extra_attrs) =
try!(self.parse_item_trait(ast::Unsafety::Normal));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if try!(self.eat_keyword(keywords::Impl) ){
// IMPL ITEM
let (ident, item_, extra_attrs) = try!(self.parse_item_impl(ast::Unsafety::Normal));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if try!(self.eat_keyword(keywords::Struct) ){
// STRUCT ITEM
let (ident, item_, extra_attrs) = try!(self.parse_item_struct());
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
self.parse_macro_use_or_failure(attrs,macros_allowed,lo,visibility)
}
/// Parse a foreign item.
fn parse_foreign_item(&mut self) -> PResult<Option<P<ForeignItem>>> {
let attrs = self.parse_outer_attributes();
let lo = self.span.lo;
let visibility = try!(self.parse_visibility());
if self.check_keyword(keywords::Static) {
// FOREIGN STATIC ITEM
return Ok(Some(try!(self.parse_item_foreign_static(visibility, attrs))));
}
if self.check_keyword(keywords::Fn) || self.check_keyword(keywords::Unsafe) {
// FOREIGN FUNCTION ITEM
return Ok(Some(try!(self.parse_item_foreign_fn(visibility, attrs))));
}
// FIXME #5668: this will occur for a macro invocation:
match try!(self.parse_macro_use_or_failure(attrs, true, lo, visibility)) {
Some(item) => {
return Err(self.span_fatal(item.span, "macros cannot expand to foreign items"));
}
None => Ok(None)
}
}
/// This is the fall-through for parsing items.
fn parse_macro_use_or_failure(
&mut self,
attrs: Vec<Attribute> ,
macros_allowed: bool,
lo: BytePos,
visibility: Visibility
) -> PResult<Option<P<Item>>> {
if macros_allowed && !self.token.is_any_keyword()
&& self.look_ahead(1, |t| *t == token::Not)
&& (self.look_ahead(2, |t| t.is_plain_ident())
|| self.look_ahead(2, |t| *t == token::OpenDelim(token::Paren))
|| self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))) {
// MACRO INVOCATION ITEM
let last_span = self.last_span;
self.complain_if_pub_macro(visibility, last_span);
// item macro.
let pth = try!(self.parse_path(NoTypesAllowed));
try!(self.expect(&token::Not));
// a 'special' identifier (like what `macro_rules!` uses)
// is optional. We should eventually unify invoc syntax
// and remove this.
let id = if self.token.is_plain_ident() {
try!(self.parse_ident())
} else {
token::special_idents::invalid // no special identifier
};
// eat a matched-delimiter token tree:
let delim = try!(self.expect_open_delim());
let tts = try!(self.parse_seq_to_end(&token::CloseDelim(delim),
seq_sep_none(),
|p| p.parse_token_tree()));
// single-variant-enum... :
let m = ast::MacInvocTT(pth, tts, EMPTY_CTXT);
let m: ast::Mac = codemap::Spanned { node: m,
span: mk_sp(self.span.lo,
self.span.hi) };
if delim != token::Brace {
if !try!(self.eat(&token::Semi) ){
let last_span = self.last_span;
self.span_err(last_span,
"macros that expand to items must either \
be surrounded with braces or followed by \
a semicolon");
}
}
let item_ = ItemMac(m);
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
id,
item_,
visibility,
attrs);
return Ok(Some(item));
}
// FAILURE TO PARSE ITEM
match visibility {
Inherited => {}
Public => {
let last_span = self.last_span;
return Err(self.span_fatal(last_span, "unmatched visibility `pub`"));
}
}
if !attrs.is_empty() {
self.expected_item_err(&attrs);
}
Ok(None)
}
pub fn parse_item_nopanic(&mut self) -> PResult<Option<P<Item>>> {
let attrs = self.parse_outer_attributes();
self.parse_item_(attrs, true)
}
/// Matches view_path : MOD? non_global_path as IDENT
/// | MOD? non_global_path MOD_SEP LBRACE RBRACE
/// | MOD? non_global_path MOD_SEP LBRACE ident_seq RBRACE
/// | MOD? non_global_path MOD_SEP STAR
/// | MOD? non_global_path
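    ///
    /// Illustrative examples of these forms (not part of the original
    /// grammar comment):
    ///     use foo::bar as baz;   // non_global_path as IDENT
    ///     use foo::{a, b};       // non_global_path MOD_SEP LBRACE ident_seq RBRACE
    ///     use foo::bar::*;       // non_global_path MOD_SEP STAR
    ///     use foo::bar;          // non_global_path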
fn parse_view_path(&mut self) -> PResult<P<ViewPath>> {
let lo = self.span.lo;
// Allow a leading :: because the paths are absolute either way.
// This occurs with "use $crate::..." in macros.
try!(self.eat(&token::ModSep));
if self.check(&token::OpenDelim(token::Brace)) {
// use {foo,bar}
let idents = try!(self.parse_unspanned_seq(
&token::OpenDelim(token::Brace),
&token::CloseDelim(token::Brace),
seq_sep_trailing_allowed(token::Comma),
|p| p.parse_path_list_item()));
let path = ast::Path {
span: mk_sp(lo, self.span.hi),
global: false,
segments: Vec::new()
};
return Ok(P(spanned(lo, self.span.hi, ViewPathList(path, idents))));
}
let first_ident = try!(self.parse_ident());
let mut path = vec!(first_ident);
if let token::ModSep = self.token {
// foo::bar or foo::{a,b,c} or foo::* | match self.token {
token::Ident(..) => {
let ident = try!(self.parse_ident());
path.push(ident);
}
// foo::bar::{a,b,c}
token::OpenDelim(token::Brace) => {
let idents = try!(self.parse_unspanned_seq(
&token::OpenDelim(token::Brace),
&token::CloseDelim(token::Brace),
seq_sep_trailing_allowed(token::Comma),
|p| p.parse_path_list_item()
));
let path = ast::Path {
span: mk_sp(lo, self.span.hi),
global: false,
segments: path.into_iter().map(|identifier| {
ast::PathSegment {
identifier: identifier,
parameters: ast::PathParameters::none(),
}
}).collect()
};
return Ok(P(spanned(lo, self.span.hi, ViewPathList(path, idents))));
}
// foo::bar::*
token::BinOp(token::Star) => {
try!(self.bump());
let path = ast::Path {
span: mk_sp(lo, self.span.hi),
global: false,
segments: path.into_iter().map(|identifier| {
ast::PathSegment {
identifier: identifier,
parameters: ast::PathParameters::none(),
}
}).collect()
};
return Ok(P(spanned(lo, self.span.hi, ViewPathGlob(path))));
}
// fall-through for case foo::bar::;
token::Semi => {
self.span_err(self.span, "expected identifier or `{` or `*`, found `;`");
}
_ => break
}
}
}
let mut rename_to = path[path.len() - 1];
let path = ast::Path {
span: mk_sp(lo, self.last_span.hi),
global: false,
segments: path.into_iter().map(|identifier| {
ast::PathSegment {
identifier: identifier,
parameters: ast::PathParameters::none(),
}
}).collect()
};
if try!(self.eat_keyword(keywords::As)) {
rename_to = try!(self.parse_ident())
}
Ok(P(spanned(lo, self.last_span.hi, ViewPathSimple(rename_to, path))))
}
/// Parses a source module as a crate. This is the main
/// entry point for the parser.
pub fn parse_crate_mod(&mut self) -> PResult<Crate> {
let lo = self.span.lo;
Ok(ast::Crate {
attrs: self.parse_inner_attributes(),
module: try!(self.parse_mod_items(&token::Eof, lo)),
config: self.cfg.clone(),
span: mk_sp(lo, self.span.lo),
exported_macros: Vec::new(),
})
}
pub fn parse_optional_str(&mut self)
-> PResult<Option<(InternedString,
ast::StrStyle,
Option<ast::Name>)>> {
let ret = match self.token {
token::Literal(token::Str_(s), suf) => {
(self.id_to_interned_str(s.ident()), ast::CookedStr, suf)
}
token::Literal(token::StrRaw(s, n), suf) => {
(self.id_to_interned_str(s.ident()), ast::RawStr(n), suf)
}
_ => return Ok(None)
};
try!(self.bump());
Ok(Some(ret))
}
pub fn parse_str(&mut self) -> PResult<(InternedString, StrStyle)> {
match try!(self.parse_optional_str()) {
Some((s, style, suf)) => {
let sp = self.last_span;
self.expect_no_suffix(sp, "str literal", suf);
Ok((s, style))
}
_ => Err(self.fatal("expected string literal"))
}
}
} | while self.check(&token::ModSep) {
try!(self.bump());
|
user.controller.ts | import {UserService} from "./user.service";
import * as express from 'express';
import {UserAttributes} from "../../model/mysqlmodels/User";
export class | {
    private _userService: UserService;
constructor() {
this._userService = new UserService();
}
retrieve = (req: express.Request, res: express.Response, next: express.NextFunction)=> {
this._userService.retrieve()
.subscribe((data)=> {
res.json(data);
}, (err)=> {
next(err);
});
}
findById: express.RequestHandler = (req: express.Request, res: express.Response, next: express.NextFunction)=> {
this._userService.findById(req.authInfo.userId)
.subscribe((data)=> {
res.json(data);
}, (err)=> {
next(err);
})
};
create = (req: express.Request, res: express.Response, next: express.NextFunction)=> {
console.log(req.body);
this._userService.create(<UserAttributes>req.body)
.subscribe((data)=> {
res.json(data);
}, (err)=> {
next(err);
});
};
update = (req: express.Request, res: express.Response, next: express.NextFunction)=> {
this._userService.update(req.params.id, req.body)
.subscribe(()=> {
res.json({status:"ok"});
}, (err)=> {
next(err);
});
};
delete = (req: express.Request, res: express.Response)=> {
        // delete is intentionally not implemented
};
}
| UserController |
compute-invoice.js | import flow from 'lodash.flow'
import crio from 'crio'
import shortid from 'shortid'
function updatePayments(invoice) {
const payments = invoice.get(`payments`)
if (!crio.isArray(payments)) return invoice
const updatedPayments = payments
.filter(payment => payment.message || payment.date || payment.amount)
.map(payment => {
if (!payment._id) return payment.set(`_id`, shortid())
return payment
})
.push(
crio({
_id: shortid(),
message: ``,
date: ``,
amount: 0,
}),
)
return invoice.set(`payments`, updatedPayments)
}
function updatePaymentsFieldPath(invoice) {
const payments = invoice.get(`payments`)
if (!crio.isArray(payments)) return invoice
const updated = payments.map((payment, index) => {
return payment.set(`_fieldPath`, `payments[${index}]`)
})
return invoice.set(`payments`, updated)
}
function recomputeTotals(invoice) {
const payments = invoice.get(`payments`)
if (!crio.isArray(payments)) return invoice | const paid = payments.reduce(
    (acc, payment) => parseFloat(payment.amount) + acc,
0,
)
const left = total - paid
return invoice.set(`totalPaid`, paid).set(`totalLeft`, left)
}
function isPaymentFieldName(inputName) {
return /^payments\[\d+\]/.test(inputName)
}
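// Illustrative usage (not from the original source):
//   isPaymentFieldName('payments[0].amount') // => true
//   isPaymentFieldName('client.name')        // => false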
export const all = flow(
updatePayments,
updatePaymentsFieldPath,
recomputeTotals,
) | const total = invoice.get(`total`) |
msvs_emulation.py | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module helps emulate Visual Studio 2008 behavior on top of other
build systems, primarily ninja.
"""
import os
import re
import subprocess
import sys
from gyp.common import OrderedSet
import gyp.MSVSUtil
import gyp.MSVSVersion
windows_quoter_regex = re.compile(r'(\\*)"')
def QuoteForRspFile(arg):
"""Quote a command line argument so that it appears as one argument when
processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
Windows programs)."""
# See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment
# threads. This is actually the quoting rules for CommandLineToArgvW, not
# for the shell, because the shell doesn't do anything in Windows. This
# works more or less because most programs (including the compiler, etc.)
# use that function to handle command line arguments.
# Use a heuristic to try to find args that are paths, and normalize them
if arg.find("/") > 0 or arg.count("/") > 1:
arg = os.path.normpath(arg)
# For a literal quote, CommandLineToArgvW requires 2n+1 backslashes
# preceding it, and results in n backslashes + the quote. So we substitute
# in 2* what we match, +1 more, plus the quote.
arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)
# %'s also need to be doubled otherwise they're interpreted as batch
# positional arguments. Also make sure to escape the % so that they're
# passed literally through escaping so they can be singled to just the
# original %. Otherwise, trying to pass the literal representation that
# looks like an environment variable to the shell (e.g. %PATH%) would fail.
arg = arg.replace("%", "%%")
# These commands are used in rsp files, so no escaping for the shell (via ^)
# is necessary.
# Finally, wrap the whole thing in quotes so that the above quote rule
# applies and whitespace isn't a word break.
return '"' + arg + '"'
def | (args):
"""Process a list of arguments using QuoteCmdExeArgument."""
# Note that the first argument is assumed to be the command. Don't add
# quotes around it because then built-ins like 'echo', etc. won't work.
# Take care to normpath only the path in the case of 'call ../x.bat' because
# otherwise the whole thing is incorrectly interpreted as a path and not
# normalized correctly.
if not args:
return ""
if args[0].startswith("call "):
call, program = args[0].split(" ", 1)
program = call + " " + os.path.normpath(program)
else:
program = os.path.normpath(args[0])
return program + " " + " ".join(QuoteForRspFile(arg) for arg in args[1:])
def _GenericRetrieve(root, default, path):
"""Given a list of dictionary keys |path| and a tree of dicts |root|, find
value at path, or return |default| if any of the path doesn't exist."""
if not root:
return default
if not path:
return root
return _GenericRetrieve(root.get(path[0]), default, path[1:])
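# Illustrative traces (not from the original source):
#   _GenericRetrieve({'a': {'b': 1}}, 0, ['a', 'b'])  ->  1
#   _GenericRetrieve({'a': {'b': 1}}, 0, ['a', 'x'])  ->  0   (missing key)
# Note that a falsy intermediate value (None, '', 0, ...) also short-circuits
# to the default because of the `if not root` test.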
def _AddPrefix(element, prefix):
"""Add |prefix| to |element| or each subelement if element is iterable."""
if element is None:
return element
# Note, not Iterable because we don't want to handle strings like that.
if isinstance(element, list) or isinstance(element, tuple):
return [prefix + e for e in element]
else:
return prefix + element
def _DoRemapping(element, map):
"""If |element| then remap it through |map|. If |element| is iterable then
each item will be remapped. Any elements not found will be removed."""
if map is not None and element is not None:
if not callable(map):
map = map.get # Assume it's a dict, otherwise a callable to do the remap.
if isinstance(element, list) or isinstance(element, tuple):
            element = list(filter(None, [map(elem) for elem in element]))
else:
element = map(element)
return element
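# Illustrative trace (not from the original source):
#   _DoRemapping(['a', 'b', 'x'], {'a': '1', 'b': '2'})  ->  ['1', '2']
# 'x' maps to None via dict.get and is dropped by filter(); the list() call
# above materializes the result, since Python 3's filter() is otherwise lazy.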
def _AppendOrReturn(append, element):
"""If |append| is None, simply return |element|. If |append| is not None,
then add |element| to it, adding each item in |element| if it's a list or
tuple."""
if append is not None and element is not None:
if isinstance(element, list) or isinstance(element, tuple):
append.extend(element)
else:
append.append(element)
else:
return element
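# Illustrative traces (not from the original source):
#   out = []; _AppendOrReturn(out, ['a', 'b'])  # out == ['a', 'b'], returns None
#   _AppendOrReturn(None, 'x')                  # returns 'x'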
def _FindDirectXInstallation():
"""Try to find an installation location for the DirectX SDK. Check for the
standard environment variable, and if that doesn't exist, try to find
via the registry. May return None if not found in either location."""
# Return previously calculated value, if there is one
if hasattr(_FindDirectXInstallation, "dxsdk_dir"):
return _FindDirectXInstallation.dxsdk_dir
dxsdk_dir = os.environ.get("DXSDK_DIR")
if not dxsdk_dir:
# Setup params to pass to and attempt to launch reg.exe.
cmd = ["reg.exe", "query", r"HKLM\Software\Microsoft\DirectX", "/s"]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = p.communicate()[0].decode("utf-8")
for line in stdout.splitlines():
if "InstallPath" in line:
dxsdk_dir = line.split(" ")[3] + "\\"
# Cache return value
_FindDirectXInstallation.dxsdk_dir = dxsdk_dir
return dxsdk_dir
def GetGlobalVSMacroEnv(vs_version):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents. Returns all variables that are independent of the target."""
env = {}
# '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
# Visual Studio is actually installed.
if vs_version.Path():
env["$(VSInstallDir)"] = vs_version.Path()
env["$(VCInstallDir)"] = os.path.join(vs_version.Path(), "VC") + "\\"
# Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
# set. This happens when the SDK is sync'd via src-internal, rather than
# by typical end-user installation of the SDK. If it's not set, we don't
# want to leave the unexpanded variable in the path, so simply strip it.
dxsdk_dir = _FindDirectXInstallation()
env["$(DXSDK_DIR)"] = dxsdk_dir if dxsdk_dir else ""
# Try to find an installation location for the Windows DDK by checking
# the WDK_DIR environment variable, may be None.
env["$(WDK_DIR)"] = os.environ.get("WDK_DIR", "")
return env
def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
"""Finds msvs_system_include_dirs that are common to all targets, removes
them from all targets, and returns an OrderedSet containing them."""
all_system_includes = OrderedSet(configs[0].get("msvs_system_include_dirs", []))
for config in configs[1:]:
system_includes = config.get("msvs_system_include_dirs", [])
all_system_includes = all_system_includes & OrderedSet(system_includes)
if not all_system_includes:
return None
# Expand macros in all_system_includes.
env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags))
expanded_system_includes = OrderedSet(
[ExpandMacros(include, env) for include in all_system_includes]
)
if any(["$" in include for include in expanded_system_includes]):
# Some path relies on target-specific variables, bail.
return None
# Remove system includes shared by all targets from the targets.
for config in configs:
includes = config.get("msvs_system_include_dirs", [])
if includes: # Don't insert a msvs_system_include_dirs key if not needed.
# This must check the unexpanded includes list:
new_includes = [i for i in includes if i not in all_system_includes]
config["msvs_system_include_dirs"] = new_includes
return expanded_system_includes
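# Illustrative example (not from the original source): if every config lists
# 'C:\sdk\include' in msvs_system_include_dirs, that path is stripped from
# each config and returned in the shared OrderedSet; if any expanded path
# still contains a '$' macro, the function bails out with None instead.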
class MsvsSettings:
"""A class that understands the gyp 'msvs_...' values (especially the
    msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
class helps map those settings to command line options."""
def __init__(self, spec, generator_flags):
self.spec = spec
self.vs_version = GetVSVersion(generator_flags)
supported_fields = [
("msvs_configuration_attributes", dict),
("msvs_settings", dict),
("msvs_system_include_dirs", list),
("msvs_disabled_warnings", list),
("msvs_precompiled_header", str),
("msvs_precompiled_source", str),
("msvs_configuration_platform", str),
("msvs_target_platform", str),
]
configs = spec["configurations"]
for field, default in supported_fields:
setattr(self, field, {})
for configname, config in configs.items():
getattr(self, field)[configname] = config.get(field, default())
self.msvs_cygwin_dirs = spec.get("msvs_cygwin_dirs", ["."])
unsupported_fields = [
"msvs_prebuild",
"msvs_postbuild",
]
unsupported = []
for field in unsupported_fields:
for config in configs.values():
if field in config:
unsupported += [
"{} not supported (target {}).".format(
field, spec["target_name"]
)
]
if unsupported:
raise Exception("\n".join(unsupported))
def GetExtension(self):
"""Returns the extension for the target, with no leading dot.
Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
the target type.
"""
ext = self.spec.get("product_extension", None)
if ext:
return ext
return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec["type"], "")
def GetVSMacroEnv(self, base_to_build=None, config=None):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents."""
target_arch = self.GetArch(config)
if target_arch == "x86":
target_platform = "Win32"
else:
target_platform = target_arch
target_name = self.spec.get("product_prefix", "") + self.spec.get(
"product_name", self.spec["target_name"]
)
target_dir = base_to_build + "\\" if base_to_build else ""
target_ext = "." + self.GetExtension()
target_file_name = target_name + target_ext
replacements = {
"$(InputName)": "${root}",
"$(InputPath)": "${source}",
"$(IntDir)": "$!INTERMEDIATE_DIR",
"$(OutDir)\\": target_dir,
"$(PlatformName)": target_platform,
"$(ProjectDir)\\": "",
"$(ProjectName)": self.spec["target_name"],
"$(TargetDir)\\": target_dir,
"$(TargetExt)": target_ext,
"$(TargetFileName)": target_file_name,
"$(TargetName)": target_name,
"$(TargetPath)": os.path.join(target_dir, target_file_name),
}
replacements.update(GetGlobalVSMacroEnv(self.vs_version))
return replacements
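    # Illustrative expansion (not from the original source): for a
    # 'shared_library' target named 'foo' built for x86, '$(TargetFileName)'
    # maps to 'foo.dll' and '$(PlatformName)' maps to 'Win32'.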
def ConvertVSMacros(self, s, base_to_build=None, config=None):
"""Convert from VS macro names to something equivalent."""
env = self.GetVSMacroEnv(base_to_build, config=config)
return ExpandMacros(s, env)
def AdjustLibraries(self, libraries):
"""Strip -l from library if it's specified with that."""
libs = [lib[2:] if lib.startswith("-l") else lib for lib in libraries]
return [
lib + ".lib"
if not lib.lower().endswith(".lib") and not lib.lower().endswith(".obj")
else lib
for lib in libs
]
def _GetAndMunge(self, field, path, default, prefix, append, map):
"""Retrieve a value from |field| at |path| or return |default|. If
|append| is specified, and the item is found, it will be appended to that
object instead of returned. If |map| is specified, results will be
remapped through |map| before being returned or appended."""
result = _GenericRetrieve(field, default, path)
result = _DoRemapping(result, map)
result = _AddPrefix(result, prefix)
return _AppendOrReturn(append, result)
class _GetWrapper:
def __init__(self, parent, field, base_path, append=None):
self.parent = parent
self.field = field
self.base_path = [base_path]
self.append = append
def __call__(self, name, map=None, prefix="", default=None):
return self.parent._GetAndMunge(
self.field,
self.base_path + [name],
default=default,
prefix=prefix,
append=self.append,
map=map,
)
def GetArch(self, config):
"""Get architecture based on msvs_configuration_platform and
msvs_target_platform. Returns either 'x86' or 'x64'."""
configuration_platform = self.msvs_configuration_platform.get(config, "")
platform = self.msvs_target_platform.get(config, "")
if not platform: # If no specific override, use the configuration's.
platform = configuration_platform
# Map from platform to architecture.
return {"Win32": "x86", "x64": "x64", "ARM64": "arm64"}.get(platform, "x86")
def _TargetConfig(self, config):
"""Returns the target-specific configuration."""
        # There are two levels of architecture/platform specification in VS. The
# first level is globally for the configuration (this is what we consider
# "the" config at the gyp level, which will be something like 'Debug' or
# 'Release'), VS2015 and later only use this level
        if self.vs_version.short_name >= "2015":
return config
# and a second target-specific configuration, which is an
# override for the global one. |config| is remapped here to take into
# account the local target-specific overrides to the global configuration.
arch = self.GetArch(config)
if arch == "x64" and not config.endswith("_x64"):
config += "_x64"
if arch == "x86" and config.endswith("_x64"):
config = config.rsplit("_", 1)[0]
return config
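    # Illustrative remapping (not from the original source): before VS2015,
    # a 'Debug' config whose effective arch is x64 becomes 'Debug_x64', and
    # 'Debug_x64' with an x86 arch is folded back to 'Debug'.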
def _Setting(self, path, config, default=None, prefix="", append=None, map=None):
"""_GetAndMunge for msvs_settings."""
return self._GetAndMunge(
self.msvs_settings[config], path, default, prefix, append, map
)
def _ConfigAttrib(
self, path, config, default=None, prefix="", append=None, map=None
):
"""_GetAndMunge for msvs_configuration_attributes."""
return self._GetAndMunge(
self.msvs_configuration_attributes[config],
path,
default,
prefix,
append,
map,
)
def AdjustIncludeDirs(self, include_dirs, config):
"""Updates include_dirs to expand VS specific paths, and adds the system
include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = include_dirs + self.msvs_system_include_dirs[config]
includes.extend(
self._Setting(
("VCCLCompilerTool", "AdditionalIncludeDirectories"), config, default=[]
)
)
return [self.ConvertVSMacros(p, config=config) for p in includes]
def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
"""Updates midl_include_dirs to expand VS specific paths, and adds the
system include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = midl_include_dirs + self.msvs_system_include_dirs[config]
includes.extend(
self._Setting(
("VCMIDLTool", "AdditionalIncludeDirectories"), config, default=[]
)
)
return [self.ConvertVSMacros(p, config=config) for p in includes]
def GetComputedDefines(self, config):
"""Returns the set of defines that are injected to the defines list based
on other VS settings."""
config = self._TargetConfig(config)
defines = []
if self._ConfigAttrib(["CharacterSet"], config) == "1":
defines.extend(("_UNICODE", "UNICODE"))
if self._ConfigAttrib(["CharacterSet"], config) == "2":
defines.append("_MBCS")
defines.extend(
self._Setting(
("VCCLCompilerTool", "PreprocessorDefinitions"), config, default=[]
)
)
return defines
def GetCompilerPdbName(self, config, expand_special):
"""Get the pdb file name that should be used for compiler invocations, or
None if there's no explicit name specified."""
config = self._TargetConfig(config)
pdbname = self._Setting(("VCCLCompilerTool", "ProgramDataBaseFileName"), config)
if pdbname:
pdbname = expand_special(self.ConvertVSMacros(pdbname))
return pdbname
def GetMapFileName(self, config, expand_special):
"""Gets the explicitly overridden map file name for a target or returns None
if it's not set."""
config = self._TargetConfig(config)
map_file = self._Setting(("VCLinkerTool", "MapFileName"), config)
if map_file:
map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
return map_file
def GetOutputName(self, config, expand_special):
"""Gets the explicitly overridden output name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
type = self.spec["type"]
root = "VCLibrarianTool" if type == "static_library" else "VCLinkerTool"
# TODO(scottmg): Handle OutputDirectory without OutputFile.
output_file = self._Setting((root, "OutputFile"), config)
if output_file:
output_file = expand_special(
self.ConvertVSMacros(output_file, config=config)
)
return output_file
def GetPDBName(self, config, expand_special, default):
"""Gets the explicitly overridden pdb name for a target or returns
default if it's not overridden, or if no pdb will be generated."""
config = self._TargetConfig(config)
output_file = self._Setting(("VCLinkerTool", "ProgramDatabaseFile"), config)
generate_debug_info = self._Setting(
("VCLinkerTool", "GenerateDebugInformation"), config
)
if generate_debug_info == "true":
if output_file:
return expand_special(self.ConvertVSMacros(output_file, config=config))
else:
return default
else:
return None
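# Illustrative sketch: with GenerateDebugInformation set to 'true' and no
# explicit ProgramDatabaseFile, GetPDBName returns the caller-supplied
# default (the output name plus '.pdb', see GetLdflags); with debug info
# off it returns None so no /PDB flag is emitted at all.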
def GetNoImportLibrary(self, config):
"""If NoImportLibrary: true, ninja will not expect the output to include
an import library."""
config = self._TargetConfig(config)
noimplib = self._Setting(("NoImportLibrary",), config)
return noimplib == "true"
def GetAsmflags(self, config):
"""Returns the flags that need to be added to ml invocations."""
config = self._TargetConfig(config)
asmflags = []
safeseh = self._Setting(("MASM", "UseSafeExceptionHandlers"), config)
if safeseh == "true":
asmflags.append("/safeseh")
return asmflags
def GetCflags(self, config):
"""Returns the flags that need to be added to .c and .cc compilations."""
config = self._TargetConfig(config)
cflags = []
cflags.extend(["/wd" + w for w in self.msvs_disabled_warnings[config]])
cl = self._GetWrapper(
self, self.msvs_settings[config], "VCCLCompilerTool", append=cflags
)
cl(
"Optimization",
map={"0": "d", "1": "1", "2": "2", "3": "x"},
prefix="/O",
default="2",
)
cl("InlineFunctionExpansion", prefix="/Ob")
cl("DisableSpecificWarnings", prefix="/wd")
cl("StringPooling", map={"true": "/GF"})
cl("EnableFiberSafeOptimizations", map={"true": "/GT"})
cl("OmitFramePointers", map={"false": "-", "true": ""}, prefix="/Oy")
cl("EnableIntrinsicFunctions", map={"false": "-", "true": ""}, prefix="/Oi")
cl("FavorSizeOrSpeed", map={"1": "t", "2": "s"}, prefix="/O")
cl(
"FloatingPointModel",
map={"0": "precise", "1": "strict", "2": "fast"},
prefix="/fp:",
default="0",
)
cl("CompileAsManaged", map={"false": "", "true": "/clr"})
cl("WholeProgramOptimization", map={"true": "/GL"})
cl("WarningLevel", prefix="/W")
cl("WarnAsError", map={"true": "/WX"})
cl(
"CallingConvention",
map={"0": "d", "1": "r", "2": "z", "3": "v"},
prefix="/G",
)
cl("DebugInformationFormat", map={"1": "7", "3": "i", "4": "I"}, prefix="/Z")
cl("RuntimeTypeInfo", map={"true": "/GR", "false": "/GR-"})
cl("EnableFunctionLevelLinking", map={"true": "/Gy", "false": "/Gy-"})
cl("MinimalRebuild", map={"true": "/Gm"})
cl("BufferSecurityCheck", map={"true": "/GS", "false": "/GS-"})
cl("BasicRuntimeChecks", map={"1": "s", "2": "u", "3": "1"}, prefix="/RTC")
cl(
"RuntimeLibrary",
map={"0": "T", "1": "Td", "2": "D", "3": "Dd"},
prefix="/M",
)
cl("ExceptionHandling", map={"1": "sc", "2": "a"}, prefix="/EH")
cl("DefaultCharIsUnsigned", map={"true": "/J"})
cl(
"TreatWChar_tAsBuiltInType",
map={"false": "-", "true": ""},
prefix="/Zc:wchar_t",
)
cl("EnablePREfast", map={"true": "/analyze"})
cl("AdditionalOptions", prefix="")
cl(
"EnableEnhancedInstructionSet",
map={"1": "SSE", "2": "SSE2", "3": "AVX", "4": "IA32", "5": "AVX2"},
prefix="/arch:",
)
cflags.extend(
[
"/FI" + f
for f in self._Setting(
("VCCLCompilerTool", "ForcedIncludeFiles"), config, default=[]
)
]
)
if self.vs_version.project_version >= 12.0:
# New flag introduced in VS2013 (project version 12.0). It forces writes
# to the program database (PDB) to be serialized through MSPDBSRV.EXE.
# https://msdn.microsoft.com/en-us/library/dn502518.aspx
cflags.append("/FS")
# ninja handles parallelism by itself, don't have the compiler do it too.
cflags = [x for x in cflags if not x.startswith("/MP")]
return cflags
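# Illustrative sketch: with VCCLCompilerTool settings
#   {'Optimization': '0', 'RuntimeLibrary': '1', 'WarningLevel': '4'}
# the wrapper above appends '/Od', '/MTd' and '/W4' to cflags, while
# 'AdditionalOptions' entries are passed through verbatim.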
def _GetPchFlags(self, config, extension):
"""Get the flags to be added to the cflags for precompiled header support."""
config = self._TargetConfig(config)
# The PCH is only built once by a particular source file. Usage of PCH must
# only be for the same language (i.e. C vs. C++), so only include the pch
# flags when the language matches.
if self.msvs_precompiled_header[config]:
source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
if _LanguageMatchesForPch(source_ext, extension):
pch = self.msvs_precompiled_header[config]
pchbase = os.path.split(pch)[1]
return ["/Yu" + pch, "/FI" + pch, "/Fp${pchprefix}." + pchbase + ".pch"]
return []
def GetCflagsC(self, config):
"""Returns the flags that need to be added to .c compilations."""
config = self._TargetConfig(config)
return self._GetPchFlags(config, ".c")
def GetCflagsCC(self, config):
"""Returns the flags that need to be added to .cc compilations."""
config = self._TargetConfig(config)
return ["/TP"] + self._GetPchFlags(config, ".cc")
def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
"""Get and normalize the list of paths in AdditionalLibraryDirectories
setting."""
config = self._TargetConfig(config)
libpaths = self._Setting(
(root, "AdditionalLibraryDirectories"), config, default=[]
)
libpaths = [
os.path.normpath(gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
for p in libpaths
]
return ['/LIBPATH:"' + p + '"' for p in libpaths]
def GetLibFlags(self, config, gyp_to_build_path):
"""Returns the flags that need to be added to lib commands."""
config = self._TargetConfig(config)
libflags = []
lib = self._GetWrapper(
self, self.msvs_settings[config], "VCLibrarianTool", append=libflags
)
libflags.extend(
self._GetAdditionalLibraryDirectories(
"VCLibrarianTool", config, gyp_to_build_path
)
)
lib("LinkTimeCodeGeneration", map={"true": "/LTCG"})
lib(
"TargetMachine",
map={"1": "X86", "17": "X64", "3": "ARM"},
prefix="/MACHINE:",
)
lib("AdditionalOptions")
return libflags
def GetDefFile(self, gyp_to_build_path):
"""Returns the .def file from sources, if any. Otherwise returns None."""
spec = self.spec
if spec["type"] in ("shared_library", "loadable_module", "executable"):
def_files = [
s for s in spec.get("sources", []) if s.lower().endswith(".def")
]
if len(def_files) == 1:
return gyp_to_build_path(def_files[0])
elif len(def_files) > 1:
raise Exception("Multiple .def files")
return None
def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
""".def files get implicitly converted to a ModuleDefinitionFile for the
linker in the VS generator. Emulate that behaviour here."""
def_file = self.GetDefFile(gyp_to_build_path)
if def_file:
ldflags.append('/DEF:"%s"' % def_file)
def GetPGDName(self, config, expand_special):
"""Gets the explicitly overridden pgd name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
output_file = self._Setting(("VCLinkerTool", "ProfileGuidedDatabase"), config)
if output_file:
output_file = expand_special(
self.ConvertVSMacros(output_file, config=config)
)
return output_file
def GetLdflags(
self,
config,
gyp_to_build_path,
expand_special,
manifest_base_name,
output_name,
is_executable,
build_dir,
):
"""Returns the flags that need to be added to link commands, and the
manifest files."""
config = self._TargetConfig(config)
ldflags = []
ld = self._GetWrapper(
self, self.msvs_settings[config], "VCLinkerTool", append=ldflags
)
self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
ld("GenerateDebugInformation", map={"true": "/DEBUG"})
# TODO: These 'map' values come from the machineTypeOption enum, which
# does not (yet) have an official value for ARM64 in VS2017. The ARM64
# value needs to be verified once machineTypeOption is updated.
ld(
"TargetMachine",
map={"1": "X86", "17": "X64", "3": "ARM", "18": "ARM64"},
prefix="/MACHINE:",
)
ldflags.extend(
self._GetAdditionalLibraryDirectories(
"VCLinkerTool", config, gyp_to_build_path
)
)
ld("DelayLoadDLLs", prefix="/DELAYLOAD:")
ld("TreatLinkerWarningAsErrors", prefix="/WX", map={"true": "", "false": ":NO"})
out = self.GetOutputName(config, expand_special)
if out:
ldflags.append("/OUT:" + out)
pdb = self.GetPDBName(config, expand_special, output_name + ".pdb")
if pdb:
ldflags.append("/PDB:" + pdb)
pgd = self.GetPGDName(config, expand_special)
if pgd:
ldflags.append("/PGD:" + pgd)
map_file = self.GetMapFileName(config, expand_special)
ld("GenerateMapFile", map={"true": "/MAP:" + map_file if map_file else "/MAP"})
ld("MapExports", map={"true": "/MAPINFO:EXPORTS"})
ld("AdditionalOptions", prefix="")
minimum_required_version = self._Setting(
("VCLinkerTool", "MinimumRequiredVersion"), config, default=""
)
if minimum_required_version:
minimum_required_version = "," + minimum_required_version
ld(
"SubSystem",
map={
"1": "CONSOLE%s" % minimum_required_version,
"2": "WINDOWS%s" % minimum_required_version,
},
prefix="/SUBSYSTEM:",
)
stack_reserve_size = self._Setting(
("VCLinkerTool", "StackReserveSize"), config, default=""
)
if stack_reserve_size:
stack_commit_size = self._Setting(
("VCLinkerTool", "StackCommitSize"), config, default=""
)
if stack_commit_size:
stack_commit_size = "," + stack_commit_size
ldflags.append(f"/STACK:{stack_reserve_size}{stack_commit_size}")
ld("TerminalServerAware", map={"1": ":NO", "2": ""}, prefix="/TSAWARE")
ld("LinkIncremental", map={"1": ":NO", "2": ""}, prefix="/INCREMENTAL")
ld("BaseAddress", prefix="/BASE:")
ld("FixedBaseAddress", map={"1": ":NO", "2": ""}, prefix="/FIXED")
ld("RandomizedBaseAddress", map={"1": ":NO", "2": ""}, prefix="/DYNAMICBASE")
ld("DataExecutionPrevention", map={"1": ":NO", "2": ""}, prefix="/NXCOMPAT")
ld("OptimizeReferences", map={"1": "NOREF", "2": "REF"}, prefix="/OPT:")
ld("ForceSymbolReferences", prefix="/INCLUDE:")
ld("EnableCOMDATFolding", map={"1": "NOICF", "2": "ICF"}, prefix="/OPT:")
ld(
"LinkTimeCodeGeneration",
map={"1": "", "2": ":PGINSTRUMENT", "3": ":PGOPTIMIZE", "4": ":PGUPDATE"},
prefix="/LTCG",
)
ld("IgnoreDefaultLibraryNames", prefix="/NODEFAULTLIB:")
ld("ResourceOnlyDLL", map={"true": "/NOENTRY"})
ld("EntryPointSymbol", prefix="/ENTRY:")
ld("Profile", map={"true": "/PROFILE"})
ld("LargeAddressAware", map={"1": ":NO", "2": ""}, prefix="/LARGEADDRESSAWARE")
# TODO(scottmg): This should sort of be somewhere else (not really a flag).
ld("AdditionalDependencies", prefix="")
if self.GetArch(config) == "x86":
safeseh_default = "true"
else:
safeseh_default = None
ld(
"ImageHasSafeExceptionHandlers",
map={"false": ":NO", "true": ""},
prefix="/SAFESEH",
default=safeseh_default,
)
# If the base address is not specifically controlled, DYNAMICBASE should
# be on by default.
if not any("DYNAMICBASE" in flag or flag == "/FIXED" for flag in ldflags):
ldflags.append("/DYNAMICBASE")
# If the NXCOMPAT flag has not been specified, default to on. Despite the
# documentation that says this only defaults to on when the subsystem is
# Vista or greater (which applies to the linker), the IDE defaults it on
# unless it's explicitly off.
if not any("NXCOMPAT" in flag for flag in ldflags):
ldflags.append("/NXCOMPAT")
have_def_file = any(flag.startswith("/DEF:") for flag in ldflags)
(
manifest_flags,
intermediate_manifest,
manifest_files,
) = self._GetLdManifestFlags(
config,
manifest_base_name,
gyp_to_build_path,
is_executable and not have_def_file,
build_dir,
)
ldflags.extend(manifest_flags)
return ldflags, intermediate_manifest, manifest_files
def _GetLdManifestFlags(
self, config, name, gyp_to_build_path, allow_isolation, build_dir
):
"""Returns a 3-tuple:
- the set of flags that need to be added to the link to generate
a default manifest
- the intermediate manifest that the linker will generate, used to assert
that it doesn't add anything to the merged one.
- the list of all the manifest files to be merged by the manifest tool and
included into the link."""
generate_manifest = self._Setting(
("VCLinkerTool", "GenerateManifest"), config, default="true"
)
if generate_manifest != "true":
# This means not only that the linker should not generate the intermediate
# manifest but also that the manifest tool should do nothing even when
# additional manifests are specified.
return ["/MANIFEST:NO"], [], []
output_name = name + ".intermediate.manifest"
flags = [
"/MANIFEST",
"/ManifestFile:" + output_name,
]
# Instead of using the MANIFESTUAC flags, we generate a .manifest to
# include into the list of manifests. This allows us to avoid the need to
# do two passes during linking. The /MANIFEST flag and /ManifestFile are
# still used, and the intermediate manifest is used to assert that the
# final manifest we get from merging all the additional manifest files
# (plus the one we generate here) isn't modified by merging the
# intermediate into it.
# Always NO, because we generate a manifest file that has what we want.
flags.append("/MANIFESTUAC:NO")
config = self._TargetConfig(config)
enable_uac = self._Setting(
("VCLinkerTool", "EnableUAC"), config, default="true"
)
manifest_files = []
generated_manifest_outer = (
"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>"
"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>"
"%s</assembly>"
)
if enable_uac == "true":
execution_level = self._Setting(
("VCLinkerTool", "UACExecutionLevel"), config, default="0"
)
execution_level_map = {
"0": "asInvoker",
"1": "highestAvailable",
"2": "requireAdministrator",
}
ui_access = self._Setting(
("VCLinkerTool", "UACUIAccess"), config, default="false"
)
inner = """
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level='{}' uiAccess='{}' />
</requestedPrivileges>
</security>
</trustInfo>""".format(
execution_level_map[execution_level],
ui_access,
)
else:
inner = ""
generated_manifest_contents = generated_manifest_outer % inner
generated_name = name + ".generated.manifest"
# Need to join with the build_dir here as we're writing it during
# generation time, but we return the un-joined version because the build
# will occur in that directory. We only write the file if the contents
# have changed so that simply regenerating the project files doesn't
# cause a relink.
build_dir_generated_name = os.path.join(build_dir, generated_name)
gyp.common.EnsureDirExists(build_dir_generated_name)
f = gyp.common.WriteOnDiff(build_dir_generated_name)
f.write(generated_manifest_contents)
f.close()
manifest_files = [generated_name]
if allow_isolation:
flags.append("/ALLOWISOLATION")
manifest_files += self._GetAdditionalManifestFiles(config, gyp_to_build_path)
return flags, output_name, manifest_files
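# Illustrative sketch: with GenerateManifest left at its 'true' default and
# name 'foo', this returns roughly
#   (['/MANIFEST', '/ManifestFile:foo.intermediate.manifest',
#     '/MANIFESTUAC:NO'], 'foo.intermediate.manifest',
#    ['foo.generated.manifest', ...])
# plus '/ALLOWISOLATION' when allow_isolation is set; with GenerateManifest
# 'false' the result is simply (['/MANIFEST:NO'], [], []).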
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
"""Gets additional manifest files that are added to the default one
generated by the linker."""
files = self._Setting(
("VCManifestTool", "AdditionalManifestFiles"), config, default=[]
)
if isinstance(files, str):
files = files.split(";")
return [
os.path.normpath(gyp_to_build_path(self.ConvertVSMacros(f, config=config)))
for f in files
]
def IsUseLibraryDependencyInputs(self, config):
"""Returns whether the target should be linked via Use Library Dependency
Inputs (using component .objs of a given .lib)."""
config = self._TargetConfig(config)
uldi = self._Setting(("VCLinkerTool", "UseLibraryDependencyInputs"), config)
return uldi == "true"
def IsEmbedManifest(self, config):
"""Returns whether manifest should be linked into binary."""
config = self._TargetConfig(config)
embed = self._Setting(
("VCManifestTool", "EmbedManifest"), config, default="true"
)
return embed == "true"
def IsLinkIncremental(self, config):
"""Returns whether the target should be linked incrementally."""
config = self._TargetConfig(config)
link_inc = self._Setting(("VCLinkerTool", "LinkIncremental"), config)
return link_inc != "1"
def GetRcflags(self, config, gyp_to_ninja_path):
"""Returns the flags that need to be added to invocations of the resource
compiler."""
config = self._TargetConfig(config)
rcflags = []
rc = self._GetWrapper(
self, self.msvs_settings[config], "VCResourceCompilerTool", append=rcflags
)
rc("AdditionalIncludeDirectories", map=gyp_to_ninja_path, prefix="/I")
rcflags.append("/I" + gyp_to_ninja_path("."))
rc("PreprocessorDefinitions", prefix="/d")
# /l arg must be in hex without leading '0x'
rc("Culture", prefix="/l", map=lambda x: hex(int(x))[2:])
return rcflags
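# Illustrative sketch: a gyp 'Culture' value of '1033' (en-US) is rendered
# as the rc.exe flag '/l409', since hex(1033) == '0x409' and the leading
# '0x' is sliced off by the map lambda above.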
def BuildCygwinBashCommandLine(self, args, path_to_base):
"""Build a command line that runs args via cygwin bash. We assume that all
incoming paths are in Windows normpath'd form, so they need to be
converted to posix style for the part of the command line that's passed to
bash. We also have to do some Visual Studio macro emulation here because
various rules use magic VS names for things. Also note that rules that
contain ninja variables cannot be fixed here (for example ${source}), so
the outer generator needs to make sure that the paths that are written out
are in posix style, if the command line will be used here."""
cygwin_dir = os.path.normpath(
os.path.join(path_to_base, self.msvs_cygwin_dirs[0])
)
cd = ("cd %s" % path_to_base).replace("\\", "/")
args = [a.replace("\\", "/").replace('"', '\\"') for a in args]
args = ["'%s'" % a.replace("'", "'\\''") for a in args]
bash_cmd = " ".join(args)
cmd = (
'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir
+ f'bash -c "{cd} ; {bash_cmd}"'
)
return cmd
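# Illustrative sketch (hypothetical paths): for args ['python', 'do.py'],
# path_to_base '../..' and a cygwin dir of '..\..\cygwin', this produces
# roughly
#   call "..\..\cygwin\setup_env.bat" && set CYGWIN=nontsec &&
#     bash -c "cd ../.. ; 'python' 'do.py'"
# where single quotes protect each argument from bash after the
# backslash/quote rewriting above.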
def IsRuleRunUnderCygwin(self, rule):
"""Determine if an action should be run under cygwin. If the variable is
unset, or set to 1 we use cygwin."""
return (
int(rule.get("msvs_cygwin_shell", self.spec.get("msvs_cygwin_shell", 1)))
!= 0
)
def _HasExplicitRuleForExtension(self, spec, extension):
"""Determine if there's an explicit rule for a particular extension."""
for rule in spec.get("rules", []):
if rule["extension"] == extension:
return True
return False
def _HasExplicitIdlActions(self, spec):
"""Determine if an action should not run midl for .idl files."""
return any(
[action.get("explicit_idl_action", 0) for action in spec.get("actions", [])]
)
def HasExplicitIdlRulesOrActions(self, spec):
"""Determine if there's an explicit rule or action for idl files. When
there isn't we need to generate implicit rules to build MIDL .idl files."""
return self._HasExplicitRuleForExtension(
spec, "idl"
) or self._HasExplicitIdlActions(spec)
def HasExplicitAsmRules(self, spec):
"""Determine if there's an explicit rule for asm files. When there isn't we
need to generate implicit rules to assemble .asm files."""
return self._HasExplicitRuleForExtension(spec, "asm")
def GetIdlBuildData(self, source, config):
"""Determine the implicit outputs for an idl file. Returns output
directory, outputs, and variables and flags that are required."""
config = self._TargetConfig(config)
midl_get = self._GetWrapper(self, self.msvs_settings[config], "VCMIDLTool")
def midl(name, default=None):
return self.ConvertVSMacros(midl_get(name, default=default), config=config)
tlb = midl("TypeLibraryName", default="${root}.tlb")
header = midl("HeaderFileName", default="${root}.h")
dlldata = midl("DLLDataFileName", default="dlldata.c")
iid = midl("InterfaceIdentifierFileName", default="${root}_i.c")
proxy = midl("ProxyFileName", default="${root}_p.c")
# Note that .tlb is not included in the outputs as it is not always
# generated depending on the content of the input idl file.
outdir = midl("OutputDirectory", default="")
output = [header, dlldata, iid, proxy]
variables = [
("tlb", tlb),
("h", header),
("dlldata", dlldata),
("iid", iid),
("proxy", proxy),
]
# TODO(scottmg): Are there configuration settings to set these flags?
target_platform = self.GetArch(config)
if target_platform == "x86":
target_platform = "win32"
flags = ["/char", "signed", "/env", target_platform, "/Oicf"]
return outdir, output, variables, flags
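# Illustrative sketch: with default MIDL settings this returns outputs
# ['${root}.h', 'dlldata.c', '${root}_i.c', '${root}_p.c'] (where ${root}
# is filled in by the build) plus flags such as
# ['/char', 'signed', '/env', 'win32', '/Oicf'] when targeting x86.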
def _LanguageMatchesForPch(source_ext, pch_source_ext):
c_exts = (".c",)
cc_exts = (".cc", ".cxx", ".cpp")
return (source_ext in c_exts and pch_source_ext in c_exts) or (
source_ext in cc_exts and pch_source_ext in cc_exts
)
class PrecompiledHeader:
"""Helper to generate dependencies and build rules to handle generation of
precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
"""
def __init__(
self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext
):
self.settings = settings
self.config = config
pch_source = self.settings.msvs_precompiled_source[self.config]
self.pch_source = gyp_to_build_path(pch_source)
filename, _ = os.path.splitext(pch_source)
self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()
def _PchHeader(self):
"""Get the header that will appear in an #include line for all source
files."""
return self.settings.msvs_precompiled_header[self.config]
def GetObjDependencies(self, sources, objs, arch):
"""Given a list of sources files and the corresponding object files,
returns a list of the pch files that should be depended upon. The
additional wrapping in the return value is for interface compatibility
with make.py on Mac, and xcode_emulation.py."""
assert arch is None
if not self._PchHeader():
return []
pch_ext = os.path.splitext(self.pch_source)[1]
for source in sources:
if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
return [(None, None, self.output_obj)]
return []
def GetPchBuildCommands(self, arch):
"""Not used on Windows as there are no additional build steps required
(instead, existing steps are modified in GetFlagsModifications below)."""
return []
def GetFlagsModifications(
self, input, output, implicit, command, cflags_c, cflags_cc, expand_special
):
"""Get the modified cflags and implicit dependencies that should be used
for the pch compilation step."""
if input == self.pch_source:
pch_output = ["/Yc" + self._PchHeader()]
if command == "cxx":
return (
[("cflags_cc", map(expand_special, cflags_cc + pch_output))],
self.output_obj,
[],
)
elif command == "cc":
return (
[("cflags_c", map(expand_special, cflags_c + pch_output))],
self.output_obj,
[],
)
return [], output, implicit
vs_version = None
def GetVSVersion(generator_flags):
global vs_version
if not vs_version:
vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
generator_flags.get("msvs_version", "auto"), allow_fallback=False
)
return vs_version
def _GetVsvarsSetupArgs(generator_flags, arch):
vs = GetVSVersion(generator_flags)
return vs.SetupScript()
def ExpandMacros(string, expansions):
"""Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
for the canonical way to retrieve a suitable dict."""
if "$" in string:
for old, new in expansions.items():
assert "$(" not in new, new
string = string.replace(old, new)
return string
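# Illustrative sketch (hypothetical values): given
#   expansions = {'$(OutDir)': 'out\\Release', '$(InputName)': 'foo'}
# then ExpandMacros('$(OutDir)\\$(InputName).dll', expansions) returns
# 'out\\Release\\foo.dll'; the assert above rejects expansion values that
# would themselves introduce new '$(' macros.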
def _ExtractImportantEnvironment(output_of_set):
"""Extracts environment variables required for the toolchain to run from
a textual dump output by the cmd.exe 'set' command."""
envvars_to_save = (
"goma_.*", # TODO(scottmg): This is ugly, but needed for goma.
"include",
"lib",
"libpath",
"path",
"pathext",
"systemroot",
"temp",
"tmp",
)
env = {}
# This occasionally happens and leads to misleading SYSTEMROOT error messages
# if not caught here.
if output_of_set.count("=") == 0:
raise Exception("Invalid output_of_set. Value is:\n%s" % output_of_set)
for line in output_of_set.splitlines():
for envvar in envvars_to_save:
if re.match(envvar + "=", line.lower()):
var, setting = line.split("=", 1)
if envvar == "path":
# Our own rules (for running gyp-win-tool) and other actions in
# Chromium rely on python being in the path. Add the path to this
# python here so that if it's not in the path when ninja is run
# later, python will still be found.
setting = os.path.dirname(sys.executable) + os.pathsep + setting
env[var.upper()] = setting
break
for required in ("SYSTEMROOT", "TEMP", "TMP"):
if required not in env:
raise Exception(
'Environment variable "%s" '
"required to be set to valid path" % required
)
return env
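# Illustrative sketch: given a dump containing the line
#   Path=C:\Windows;C:\Windows\System32
# the result maps 'PATH' to that value with the running python's directory
# prepended; variables not matched by envvars_to_save are dropped entirely.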
def _FormatAsEnvironmentBlock(envvar_dict):
"""Format as an 'environment block' directly suitable for CreateProcess.
Briefly this is a list of key=value\0, terminated by an additional \0. See
CreateProcess documentation for more details."""
block = ""
nul = "\0"
for key, value in envvar_dict.items():
block += key + "=" + value + nul
block += nul
return block
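# Illustrative sketch: _FormatAsEnvironmentBlock({'TMP': r'C:\t'}) yields
# 'TMP=C:\t\x00\x00'; each key=value pair is NUL-terminated and one extra
# NUL closes the whole block, exactly the shape CreateProcess expects.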
def _ExtractCLPath(output_of_where):
"""Gets the path to cl.exe based on the output of calling the environment
setup batch file, followed by the equivalent of `where`."""
# Take the first line, as that's the first found in the PATH.
for line in output_of_where.strip().splitlines():
if line.startswith("LOC:"):
return line[len("LOC:") :].strip()
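# Illustrative sketch: for output_of_where of
#   'LOC:C:\VS\VC\bin\cl.exe\nLOC:C:\other\cl.exe'
# this returns 'C:\VS\VC\bin\cl.exe' (the first PATH hit); if no line
# starts with 'LOC:' the loop falls through and None is returned implicitly.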
def GenerateEnvironmentFiles(
toplevel_build_dir, generator_flags, system_includes, open_out
):
"""It's not sufficient to have the absolute path to the compiler, linker,
etc. on Windows, as those tools rely on .dlls being in the PATH. We also
need to support both x86 and x64 compilers within the same build (to support
msvs_target_platform hackery). Different architectures require a different
compiler binary, and different supporting environment variables (INCLUDE,
LIB, LIBPATH). So, we extract the environment here, wrap all invocations
of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
sets up the environment, and then we do not prefix the compiler with
an absolute path, instead preferring something like "cl.exe" in the rule
which will then run whichever the environment setup has put in the path.
When the following procedure to generate environment files does not
meet your requirements (e.g. for custom toolchains), you can pass
"-G ninja_use_custom_environment_files" to gyp to suppress file
generation and use custom environment files that you prepare yourself."""
archs = ("x86", "x64")
if generator_flags.get("ninja_use_custom_environment_files", 0):
cl_paths = {}
for arch in archs:
cl_paths[arch] = "cl.exe"
return cl_paths
vs = GetVSVersion(generator_flags)
cl_paths = {}
for arch in archs:
# Extract environment variables for subprocesses.
args = vs.SetupScript(arch)
args.extend(("&&", "set"))
popen = subprocess.Popen(
args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
variables = popen.communicate()[0].decode("utf-8")
if popen.returncode != 0:
raise Exception('"%s" failed with error %d' % (args, popen.returncode))
env = _ExtractImportantEnvironment(variables)
# Inject system includes from gyp files into INCLUDE.
if system_includes:
system_includes = system_includes | OrderedSet(
env.get("INCLUDE", "").split(";")
)
env["INCLUDE"] = ";".join(system_includes)
env_block = _FormatAsEnvironmentBlock(env)
f = open_out(os.path.join(toplevel_build_dir, "environment." + arch), "w")
f.write(env_block)
f.close()
# Find cl.exe location for this architecture.
args = vs.SetupScript(arch)
args.extend(
("&&", "for", "%i", "in", "(cl.exe)", "do", "@echo", "LOC:%~$PATH:i")
)
popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
output = popen.communicate()[0].decode("utf-8")
cl_paths[arch] = _ExtractCLPath(output)
return cl_paths
def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
"""Emulate behavior of msvs_error_on_missing_sources present in the msvs
generator: Check that all regular source files, i.e. not created at run time,
exist on disk. Missing files cause needless recompilation when building via
VS, and we want this check to match for people/bots that build using ninja,
so they're not surprised when the VS build fails."""
if int(generator_flags.get("msvs_error_on_missing_sources", 0)):
no_specials = filter(lambda x: "$" not in x, sources)
relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
missing = [x for x in relative if not os.path.exists(x)]
if missing:
# They'll look like out\Release\..\..\stuff\things.cc, so normalize the
# path for a slightly less crazy looking output.
cleaned_up = [os.path.normpath(x) for x in missing]
raise Exception("Missing input files:\n%s" % "\n".join(cleaned_up))
# Sets some values in default_variables, which are required for many
# generators, run on Windows.
def CalculateCommonVariables(default_variables, params):
generator_flags = params.get("generator_flags", {})
# Set a variable so conditions can be based on msvs_version.
msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
default_variables["MSVS_VERSION"] = msvs_version.ShortName()
# To determine processor word size on Windows, in addition to checking
# PROCESSOR_ARCHITECTURE (which reflects the word size of the current
# process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
# contains the actual word size of the system when running thru WOW64).
if "64" in os.environ.get("PROCESSOR_ARCHITECTURE", "") or "64" in os.environ.get(
"PROCESSOR_ARCHITEW6432", ""
):
default_variables["MSVS_OS_BITS"] = 64
else:
default_variables["MSVS_OS_BITS"] = 32
| EncodeRspFileList |
set_candidate_on_test.go | package transaction
import (
"testing"
)
const txSetCandidateOn = "0xf872010201010aa2e1a00eb98ea04ae466d8d38f490db3c99b3996a90e24243952ce9822c6dc1e2c1a43808001b845f8431ba0efff777e61a78141ceeab311776dfd0bfc6745f125c688db86ccfa350d3d3b84a074419c32dd0d1d2ebdc1c5bfdffb238d2ef88a618e28a2ce2410880264d3b3cc"
func TestTransactionSetCandidateOn_Sign(t *testing.T) {
data := NewSetCandidateOnData().
MustSetPubKey("Mp0eb98ea04ae466d8d38f490db3c99b3996a90e24243952ce9822c6dc1e2c1a43")
tx, err := NewBuilder(TestNetChainID).NewTransaction(data)
if err != nil {
t.Fatal(err)
}
transaction := tx.SetNonce(1).SetGasPrice(1).SetGasCoin(1)
signedTx, err := transaction.Sign("05ddcd4e6f7d248ed1388f0091fe345bf9bf4fc2390384e26005e7675c98b3c1")
if err != nil {
t.Fatal(err)
}
encode, err := signedTx.Encode()
if err != nil {
t.Fatal(err)
}
if encode != txSetCandidateOn {
t.Errorf("EncodeTx got %s, want %s", string(encode), txSetCandidateOn)
}
}
func TestDecode_setCandidateOn(t *testing.T) | {
decode, err := Decode(txSetCandidateOn)
if err != nil {
t.Fatal(err)
}
if decode.Fee().String() != "10000000000000000000" {
t.Error("set candidate on transaction fee is invalid", decode.Fee().String())
}
} |
|
plot_position.py | import numpy as np
import rosbag
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from tf.transformations import euler_from_quaternion
# Read bag file
bag = rosbag.Bag('2021-09-21-19-57-22.bag')
x = []
y = []
z = []
roll = []
pitch = []
yaw = []
time = []
cycles = []
cycle_time = []
init_time = 0
for topic, msg, t in bag.read_messages(topics=['/mavros/local_position/pose', '/mavros/path_cycle']):
if topic == '/mavros/local_position/pose':
current_time = t.to_sec()
x.append(msg.pose.position.x)
y.append(msg.pose.position.y) |
orientation_q = msg.pose.orientation
orientation_list = [orientation_q.x, orientation_q.y, orientation_q.z, orientation_q.w]
(r, p, ya) = euler_from_quaternion (orientation_list)
roll.append(r)
pitch.append(p)
yaw.append(ya)
if init_time == 0:
time.append(0)
init_time = current_time
else:
time.append(current_time - init_time)
else:
cycles.append(msg.cycle)
cycle_time.append(t.to_sec() - init_time)
data = np.stack((x, y, z, roll, pitch, yaw, time))
cycles.append(5)
cycle_step = 0
cycle_data = {}
past_idx = 0
for idx, tim in enumerate(time):
if cycle_time[cycle_step] < tim:
cycle_data['cycle_{}'.format(cycle_step)] = data[:, past_idx:idx]
cycle_step += 1
past_idx = idx
if cycle_step > 4:
cycle_data['cycle_{}'.format(cycle_step)] = data[:, idx+1:]
break
## Plot position ##
###################
fig1, ax1 = plt.subplots(figsize=(20,20))
ax1.set_ylim([-3, 33])
ax1.set_xlim([0, 320])
ax1.plot(time, x, linewidth=2.5, label='x')
ax1.plot(time, y, linewidth=2.5, label='y')
ax1.plot(time, z, linewidth=2.5, label='z')
ax1.set_title("XYZ Position", fontweight = 'heavy')
ax1.set(xlabel="Time [s]", ylabel="Distance [m]")
ax1.legend(shadow=True, fancybox=True, loc='upper right')
for value in [5, 10, 25]:
ax1.axhline(y=value, color='k', linestyle='--', alpha=0.4)
## Plot orientation ##
######################
fig2, ax2 = plt.subplots(figsize=(20,20))
ax2.set_ylim([-1, 1.5])
ax2.set_xlim([0, 320])
ax2.plot(time, roll, linewidth=2.5, label='roll')
ax2.plot(time, pitch, linewidth=2.5, label='pitch')
ax2.plot(time, yaw, linewidth=2.5, label='yaw')
ax2.set_title("RPY Orientation", fontweight = 'heavy')
ax2.set(xlabel="Time [s]", ylabel="Angle [rad]")
ax2.legend(shadow=True, fancybox=True, loc='upper right')
last_tim = 0
for c, tim in enumerate(cycle_time):
ax1.axvline(x=tim, color='k', linestyle='--', alpha=0.4)
ax1.annotate(s='', xy=(last_tim,28), xytext=(tim,28), arrowprops=dict(arrowstyle='<->'))
ax2.axvline(x=tim, color='k', linestyle='--', alpha=0.4)
ax2.annotate(s='', xy=(last_tim,1), xytext=(tim,1), arrowprops=dict(arrowstyle='<->'))
if c == 0:
l = "Takeoff"
else:
l = "Cycle {}".format(c)
ax1.text((tim-last_tim)/2 + last_tim, 29.1, l, horizontalalignment='center',
verticalalignment='center', weight='bold')
ax2.text((tim-last_tim)/2 + last_tim, 1.1, l, horizontalalignment='center',
verticalalignment='center', weight='bold')
last_tim = tim
ax1.annotate(s='', xy=(last_tim,28), xytext=(data[6, -1],28), arrowprops=dict(arrowstyle='<->'))
ax1.text((data[6, -1]-last_tim)/2 + last_tim, 29.5, 'Landing', horizontalalignment='center',
verticalalignment='center', fontsize=10, weight='bold')
ax2.annotate(s='', xy=(last_tim,1), xytext=(data[6, -1],1), arrowprops=dict(arrowstyle='<->'))
ax2.text((data[6, -1]-last_tim)/2 + last_tim, 1.1, 'Landing', horizontalalignment='center',
verticalalignment='center', fontsize=10, weight='bold')
## Position 3D plot ##
######################
xs = [0, 0, 10, 10, 10]
ys = [0, 0, 0, 0, 5]
zs = [0, 10, 10, 25, 25]
fig3 = plt.figure(figsize=(20, 20))
ax3 = Axes3D(fig3, alpha=0.1)
ax3.set_title("3D XYZ Trajectory", fontweight = 'heavy')
for c in cycles:
data = cycle_data['cycle_{}'.format(c)]
if c > 0 and c < 5:
l = 'cycle_{}'.format(c)
elif c == 0:
l = 'takeoff'
else:
l = 'landing'
ax3.plot3D(data[0, :], data[1, :], data[2, :], label=l, linewidth=2.5)
ax3.legend(shadow=True, fancybox=True)
ax3.scatter(xs, ys, zs, s=35, c='k')
for xt, yt, zt in zip(xs, ys, zs):
ax3.text3D(xt + 0.1, yt + 0.1, zt + 0.1, '({},{},{})'.format(xt, yt, zt),
fontsize=10, fontweight = 'heavy')
ax3.set(xlabel="X [m]", ylabel="Y [m]", zlabel="Z [m]")
## Plot trajectories in X-Y X-Z & Y-Z planes ##
###############################################
fig4 = plt.figure(figsize=(20,20))
ax4 = fig4.add_subplot(131)
ax5 = fig4.add_subplot(132)
ax6 = fig4.add_subplot(133)
for c in cycles:
data = cycle_data['cycle_{}'.format(c)]
if c > 0 and c < 5:
l = 'cycle_{}'.format(c)
elif c == 0:
l = 'takeoff'
else:
l = 'landing'
ax4.plot(data[0, :], data[1, :], label=l, linewidth=2.5)
ax5.plot(data[0, :], data[2, :], label=l, linewidth=2.5)
ax6.plot(data[1, :], data[2, :], label=l, linewidth=2.5)
ax4.set_title("Trajectory XY", fontweight = 'heavy')
ax4.set(xlabel="X [m]", ylabel="Y [m]")
ax4.legend(shadow=True, fancybox=True, loc='upper left')
ax5.set_title("Trajectory XZ", fontweight = 'heavy')
ax5.set(xlabel="X [m]", ylabel="Z [m]")
ax5.legend(shadow=True, fancybox=True, loc='lower right')
ax6.set_title("Trajectory YZ", fontweight = 'heavy')
ax6.set(xlabel="Y [m]", ylabel="Z [m]")
ax6.legend(shadow=True, fancybox=True, loc='lower right')
for xt, yt, zt in zip(xs, ys, zs):
ax4.text(xt + 0.2, yt + 0.2, '({},{})'.format(xt, yt),
fontsize=10, fontweight = 'heavy')
ax5.text(xt + 0.2, zt + 0.2, '({},{})'.format(xt, zt),
fontsize=10, fontweight = 'heavy')
ax6.text(yt + 0.2, zt + 0.2, '({},{})'.format(yt, zt),
fontsize=10, fontweight = 'heavy')
plt.show() | z.append(msg.pose.position.z) |
gen_reference_doc.py | #!/usr/bin/env python
import glob
import os
import sys
verbose = '--verbose' in sys.argv
dump = '--dump' in sys.argv
internal = '--internal' in sys.argv
plain_output = '--plain-output' in sys.argv
if plain_output:
plain_file = open('plain_text_out.txt', 'w+')
in_code = None
paths = ['include/libtorrent/*.hpp', 'include/libtorrent/kademlia/*.hpp', 'include/libtorrent/extensions/*.hpp']
if internal:
paths.append('include/libtorrent/aux_/*.hpp')
files = []
for p in paths:
files.extend(glob.glob(os.path.join('..', p)))
| classes = []
enums = []
# maps filename to overview description
overviews = {}
# maps names -> URL
symbols = {}
# some files that need pre-processing to turn symbols into
# links into the reference documentation
preprocess_rst = \
{
'manual.rst':'manual-ref.rst',
'settings.rst':'settings-ref.rst'
}
# some pre-defined sections from the main manual
symbols = \
{
"queuing_": "manual-ref.html#queuing",
"fast-resume_": "manual-ref.html#fast-resume",
"storage-allocation_": "manual-ref.html#storage-allocation",
"alerts_": "manual-ref.html#alerts",
"upnp-and-nat-pmp_": "manual-ref.html#upnp-and-nat-pmp",
"http-seeding_": "manual-ref.html#http-seeding",
"metadata-from-peers_": "manual-ref.html#metadata-from-peers",
"magnet-links_": "manual-ref.html#magnet-links",
"ssl-torrents_": "manual-ref.html#ssl-torrents",
"dynamic-loading-of-torrent-files_": "manual-ref.html#dynamic-loading-of-torrent-files",
"session-statistics_": "manual-ref.html#session-statistics",
"peer-classes_": "manual-ref.html#peer-classes"
}
static_links = \
{
".. _`BEP 3`: http://bittorrent.org/beps/bep_0003.html",
".. _`BEP 17`: http://bittorrent.org/beps/bep_0017.html",
".. _`BEP 19`: http://bittorrent.org/beps/bep_0019.html"
}
anon_index = 0
category_mapping = {
'ed25519.hpp': 'ed25519',
'session.hpp': 'Core',
'add_torrent_params.hpp': 'Core',
'session_status.hpp': 'Core',
'error_code.hpp': 'Error Codes',
'file.hpp': 'File',
'storage.hpp': 'Custom Storage',
'storage_defs.hpp': 'Storage',
'file_storage.hpp': 'Storage',
'file_pool.hpp': 'Custom Storage',
'extensions.hpp': 'Plugins',
'ut_metadata.hpp': 'Plugins',
'ut_pex.hpp': 'Plugins',
'ut_trackers.hpp': 'Plugins',
'metadata_transfer.hpp': 'Plugins',
'smart_ban.hpp': 'Plugins',
'lt_trackers.hpp': 'Plugins',
'create_torrent.hpp': 'Create Torrents',
'alert.hpp': 'Alerts',
'alert_types.hpp': 'Alerts',
'bencode.hpp': 'Bencoding',
'lazy_entry.hpp': 'Bencoding',
'bdecode.hpp': 'Bdecoding',
'entry.hpp': 'Bencoding',
'time.hpp': 'Time',
'escape_string.hpp': 'Utility',
'enum_net.hpp': 'Network',
'broadcast_socket.hpp': 'Network',
'socket.hpp': 'Network',
'socket_io.hpp': 'Network',
'bitfield.hpp': 'Utility',
'sha1_hash.hpp': 'Utility',
'hasher.hpp': 'Utility',
'identify_client.hpp': 'Utility',
'thread.hpp': 'Utility',
'ip_filter.hpp': 'Filter',
'session_settings.hpp': 'Settings',
'settings_pack.hpp': 'Settings',
'operations.hpp': 'Alerts',
'disk_buffer_holder.hpp': 'Custom Storage',
'alert_dispatcher.hpp': 'Alerts',
}
category_fun_mapping = {
'min_memory_usage()': 'Settings',
'high_performance_seed()': 'Settings',
'cache_status': 'Core',
}
def categorize_symbol(name, filename):
f = os.path.split(filename)[1]
if name.endswith('_category()') \
or name.endswith('_error_code') \
or name.endswith('error_code_enum'):
return 'Error Codes'
if name in category_fun_mapping:
return category_fun_mapping[name]
if f in category_mapping:
return category_mapping[f]
return 'Core'
def suppress_warning(filename, name):
f = os.path.split(filename)[1]
if f != 'alert_types.hpp': return False
# if name.endswith('_alert') or name == 'message()':
return True
# return False
def first_item(itr):
for i in itr:
return i
return None
def is_visible(desc):
if desc.strip().startswith('hidden'): return False
if internal: return True
if desc.strip().startswith('internal'): return False
return True
def highlight_signature(s):
s = s.replace('TORRENT_OVERRIDE', 'override').replace('TORRENT_FINAL', 'final')
name = s.split('(', 1)
name2 = name[0].split(' ')
if len(name2[-1]) == 0: return s
# make the name of the function bold
name2[-1] = '**' + name2[-1] + '** '
# if there is a return value, make sure we preserve pointer types
if len(name2) > 1:
name2[0] = name2[0].replace('*', '\\*')
name[0] = ' '.join(name2)
# we have to escape asterisks, since this is rendered into
# a parsed literal in rst
name[1] = name[1].replace('*', '\\*')
# we also have to escape colons
name[1] = name[1].replace(':', '\\:')
# escape trailing underscores
name[1] = name[1].replace('_', '\\_')
# comments in signatures are italic
name[1] = name[1].replace('/\\*', '*/\\*')
name[1] = name[1].replace('\\*/', '\\*/*')
return '('.join(name)
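# Illustrative sketch: highlight_signature('int* clamp(int v);') returns
# 'int\* **clamp** (int v);' - the function name is bolded for rst, the
# return type's '*' is escaped, and '*', ':' and '_' characters in the
# argument list are escaped likewise so the parsed literal renders intact.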
def html_sanitize(s):
ret = ''
for i in s:
if i == '<': ret += '<'
elif i == '>': ret += '>'
elif i == '&': ret += '&'
else: ret += i
return ret
def looks_like_namespace(line):
line = line.strip()
if line.startswith('namespace'): return True
return False
def looks_like_blank(line):
line = line.split('//')[0]
line = line.replace('{', '')
line = line.replace('}', '')
line = line.replace('[', '')
line = line.replace(']', '')
line = line.replace(';', '')
line = line.strip()
return len(line) == 0
def looks_like_variable(line):
line = line.split('//')[0]
line = line.strip()
if not ' ' in line and not '\t' in line: return False
if line.startswith('friend '): return False
if line.startswith('enum '): return False
if line.startswith(','): return False
if line.startswith(':'): return False
if line.startswith('typedef'): return False
if ' = ' in line: return True
if line.endswith(';'): return True
return False
def looks_like_forward_decl(line):
line = line.split('//')[0]
line = line.strip()
if not line.endswith(';'): return False
if '{' in line: return False
if '}' in line: return False
if line.startswith('friend '): return True
if line.startswith('struct '): return True
if line.startswith('class '): return True
return False
def looks_like_function(line):
if line.startswith('friend'): return False
if '::' in line.split('(')[0].split(' ')[-1]: return False
if line.startswith(','): return False
if line.startswith(':'): return False
return '(' in line
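# Illustrative sketch: 'void swap(entry& e);' qualifies, while an
# out-of-line definition such as 'void session::abort()' is rejected by the
# '::' check, so only declarations (not member definitions) are parsed as
# functions.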
def parse_function(lno, lines, filename):
current_fun = {}
start_paren = 0
end_paren = 0
signature = ''
while lno < len(lines):
l = lines[lno].strip()
lno += 1
if l.startswith('//'): continue
start_paren += l.count('(')
end_paren += l.count(')')
sig_line = l.replace('TORRENT_EXPORT ', '').replace('TORRENT_EXTRA_EXPORT','').strip()
if signature != '': sig_line = '\n ' + sig_line
signature += sig_line
if verbose: print 'fun %s' % l
if start_paren > 0 and start_paren == end_paren:
if signature[-1] != ';':
# we also need to consume the function body
start_paren = 0
end_paren = 0
for i in range(len(signature)):
if signature[i] == '(': start_paren += 1
elif signature[i] == ')': end_paren += 1
if start_paren > 0 and start_paren == end_paren:
for k in range(i, len(signature)):
if signature[k] == ':' or signature[k] == '{':
signature = signature[0:k].strip()
break
break
lno = consume_block(lno - 1, lines)
signature += ';'
ret = [{ 'file': filename[11:], 'signatures': set([ signature ]), 'names': set([ signature.split('(')[0].split(' ')[-1].strip() + '()'])}, lno]
if first_item(ret[0]['names']) == '()': return [None, lno]
return ret
if len(signature) > 0:
print '\x1b[31mFAILED TO PARSE FUNCTION\x1b[0m %s\nline: %d\nfile: %s' % (signature, lno, filename)
return [None, lno]
def parse_class(lno, lines, filename):
start_brace = 0
end_brace = 0
name = ''
funs = []
fields = []
enums = []
state = 'public'
context = ''
class_type = 'struct'
blanks = 0
decl = ''
while lno < len(lines):
l = lines[lno].strip()
decl += lines[lno].replace('TORRENT_EXPORT ', '').replace('TORRENT_EXTRA_EXPORT', '').split('{')[0].strip()
if '{' in l: break
if verbose: print 'class %s' % l
lno += 1
if decl.startswith('class'):
state = 'private'
class_type = 'class'
decl = decl.replace('TORRENT_FINAL', 'final')
name = decl.split(':')[0].replace('class ', '').replace('struct ', '').replace('final', '').strip()
while lno < len(lines):
l = lines[lno].strip()
lno += 1
if l == '':
blanks += 1
context = ''
continue
if l.startswith('/*'):
lno = consume_comment(lno - 1, lines)
continue
if l.startswith('#'):
lno = consume_ifdef(lno - 1, lines, True)
continue
if 'TORRENT_DEFINE_ALERT' in l:
if verbose: print 'xx %s' % l
blanks += 1
continue
if 'TORRENT_DEPRECATED' in l:
if verbose: print 'xx %s' % l
blanks += 1
continue
if l.startswith('//'):
if verbose: print 'desc %s' % l
# plain output prints just the descriptions and filters out C++ code.
# it's used for running a spell checker over the documentation
if plain_output:
line = l.split('//')[1]
# if the first character is a space, strip it
if len(line) > 0 and line[0] == ' ': line = line[1:]
global in_code
if in_code != None and not line.startswith(in_code) and len(line) > 1:
in_code = None
if line.strip().startswith('.. code::'):
in_code = line.split('.. code::')[0] + '\t'
# strip out C++ code from the plain text output since it's only meant
# for spell checking
if not line.strip().startswith('.. ') and in_code == None:
plain_file.write(line + '\n')
l = l[2:]
if len(l) and l[0] == ' ': l = l[1:]
context += l + '\n'
continue
start_brace += l.count('{')
end_brace += l.count('}')
if l == 'private:': state = 'private'
elif l == 'protected:': state = 'protected'
elif l == 'public:': state = 'public'
if start_brace > 0 and start_brace == end_brace:
return [{ 'file': filename[11:], 'enums': enums, 'fields':fields, 'type': class_type, 'name': name, 'decl': decl, 'fun': funs}, lno]
if state != 'public' and not internal:
if verbose: print 'private %s' % l
blanks += 1
continue
if start_brace - end_brace > 1:
if verbose: print 'scope %s' % l
blanks += 1
continue
if looks_like_function(l):
current_fun, lno = parse_function(lno - 1, lines, filename)
if current_fun != None and is_visible(context):
if context == '' and blanks == 0 and len(funs):
funs[-1]['signatures'].update(current_fun['signatures'])
funs[-1]['names'].update(current_fun['names'])
else:
current_fun['desc'] = context
if context == '' and not suppress_warning(filename, first_item(current_fun['names'])):
print 'WARNING: member function "%s" is not documented: \x1b[34m%s:%d\x1b[0m' \
% (name + '::' + first_item(current_fun['names']), filename, lno)
funs.append(current_fun)
context = ''
blanks = 0
continue
if looks_like_variable(l):
if verbose: print 'var %s' % l
if not is_visible(context):
continue
l = l.split('//')[0].strip()
n = l.split(' ')[-1].split(':')[0].split(';')[0]
if context == '' and blanks == 0 and len(fields):
fields[-1]['names'].append(n)
fields[-1]['signatures'].append(l)
else:
if context == '' and not suppress_warning(filename, n):
print 'WARNING: field "%s" is not documented: \x1b[34m%s:%d\x1b[0m' \
% (name + '::' + n, filename, lno)
fields.append({'signatures': [l], 'names': [n], 'desc': context})
context = ''
blanks = 0
continue
if l.startswith('enum '):
if verbose: print 'enum %s' % l
if not is_visible(context):
consume_block(lno - 1, lines)
else:
enum, lno = parse_enum(lno - 1, lines, filename)
if enum != None:
enum['desc'] = context
if context == '' and not suppress_warning(filename, enum['name']):
print 'WARNING: enum "%s" is not documented: \x1b[34m%s:%d\x1b[0m' \
% (name + '::' + enum['name'], filename, lno)
enums.append(enum)
context = ''
continue
context = ''
if verbose:
if looks_like_forward_decl(l) \
or looks_like_blank(l) \
or looks_like_namespace(l):
print '-- %s' % l
else:
print '?? %s' % l
if len(name) > 0:
print '\x1b[31mFAILED TO PARSE CLASS\x1b[0m %s\nfile: %s:%d' % (name, filename, lno)
return [None, lno]
def parse_enum(lno, lines, filename):
start_brace = 0
end_brace = 0
global anon_index
l = lines[lno].strip()
name = l.replace('enum ', '').split('{')[0].strip()
if len(name) == 0:
if not internal:
print 'WARNING: anonymous enum at: \x1b[34m%s:%d\x1b[0m' % (filename, lno)
lno = consume_block(lno - 1, lines)
return [None, lno]
name = 'anonymous_enum_%d' % anon_index
anon_index += 1
values = []
context = ''
if not '{' in l:
if verbose: print 'enum %s' % lines[lno]
lno += 1
val = 0
while lno < len(lines):
l = lines[lno].strip()
lno += 1
if l.startswith('//'):
if verbose: print 'desc %s' % l
l = l[2:]
if len(l) and l[0] == ' ': l = l[1:]
context += l + '\n'
continue
if l.startswith('#'):
lno = consume_ifdef(lno - 1, lines)
continue
start_brace += l.count('{')
end_brace += l.count('}')
if '{' in l:
l = l.split('{')[1]
l = l.split('}')[0]
if len(l):
if verbose: print 'enumv %s' % lines[lno-1]
for v in l.split(','):
v = v.strip();
if v.startswith('//'): break
if v == '': continue
valstr = ''
try:
if '=' in v: val = int(v.split('=')[1].strip(), 0)
valstr = str(val)
except: pass
if '=' in v: v = v.split('=')[0].strip()
if is_visible(context):
values.append({'name': v.strip(), 'desc': context, 'val': valstr})
if verbose: print 'enumv %s' % valstr
context = ''
val += 1
else:
if verbose: print '?? %s' % lines[lno-1]
if start_brace > 0 and start_brace == end_brace:
return [{'file': filename[11:], 'name': name, 'values': values}, lno]
if len(name) > 0:
print '\x1b[31mFAILED TO PARSE ENUM\x1b[0m %s\nline: %d\nfile: %s' % (name, lno, filename)
return [None, lno]
def consume_block(lno, lines):
start_brace = 0
end_brace = 0
while lno < len(lines):
l = lines[lno].strip()
if verbose: print 'xx %s' % l
lno += 1
start_brace += l.count('{')
end_brace += l.count('}')
if start_brace > 0 and start_brace == end_brace:
break
return lno
def consume_comment(lno, lines):
while lno < len(lines):
l = lines[lno].strip()
if verbose: print 'xx %s' % l
lno += 1
if '*/' in l: break
return lno
def trim_define(l):
return l.replace('#ifndef', '').replace('#ifdef', '') \
.replace('#if', '').replace('defined', '') \
.replace('TORRENT_USE_IPV6', '').replace('TORRENT_NO_DEPRECATE', '') \
.replace('||', '').replace('&&', '').replace('(', '').replace(')','') \
.replace('!', '').replace('\\', '').strip()
def consume_ifdef(lno, lines, warn_on_ifdefs = False):
l = lines[lno].strip()
lno += 1
start_if = 1
end_if = 0
if verbose: print 'prep %s' % l
if warn_on_ifdefs and ('TORRENT_DEBUG' in l):
while l.endswith('\\'):
lno += 1
l += lines[lno].strip()
if verbose: print 'prep %s' % lines[lno].strip()
define = trim_define(l)
print '\x1b[31mWARNING: possible ABI breakage in public struct! "%s" \x1b[34m %s:%d\x1b[0m' % \
(define, filename, lno)
# we've already warned once, no need to do it twice
warn_on_ifdefs = False
if warn_on_ifdefs and '#if' in l:
while l.endswith('\\'):
lno += 1
l += lines[lno].strip()
if verbose: print 'prep %s' % lines[lno].strip()
define = trim_define(l)
if define != '':
print '\x1b[33msensitive define in public struct: "%s"\x1b[34m %s:%d\x1b[0m' % (define, filename, lno)
if l == '#ifndef TORRENT_NO_DEPRECATE' or \
l == '#ifdef TORRENT_DEBUG' or \
(l.startswith('#if ') and ' TORRENT_USE_ASSERTS' in l) or \
(l.startswith('#if ') and ' TORRENT_USE_INVARIANT_CHECKS' in l) or \
l == '#ifdef TORRENT_ASIO_DEBUGGING' or \
(l.startswith('#if') and 'defined TORRENT_DEBUG' in l) or \
(l.startswith('#if') and 'defined TORRENT_ASIO_DEBUGGING' in l):
while lno < len(lines):
l = lines[lno].strip()
lno += 1
if verbose: print 'prep %s' % l
if l.startswith('#endif'): end_if += 1
if l.startswith('#if'): start_if += 1
if l == '#else' and start_if - end_if == 1: break
if start_if - end_if == 0: break
return lno
else:
while l.endswith('\\') and lno < len(lines):
l = lines[lno].strip()
lno += 1
if verbose: print 'prep %s' % l
return lno
for filename in files:
h = open(filename)
lines = h.read().split('\n')
if verbose: print '\n=== %s ===\n' % filename
blanks = 0
lno = 0
while lno < len(lines):
l = lines[lno].strip()
lno += 1
if l == '':
blanks += 1
context = ''
continue
if l.startswith('//') and l[2:].strip() == 'OVERVIEW':
# this is a section overview
current_overview = ''
while lno < len(lines):
l = lines[lno].strip()
lno += 1
if not l.startswith('//'):
# end of overview
overviews[filename[11:]] = current_overview
current_overview = ''
break
l = l[2:]
if l.startswith(' '): l = l[1:]
current_overview += l + '\n'
if l.startswith('//'):
if verbose: print 'desc %s' % l
l = l[2:]
if len(l) and l[0] == ' ': l = l[1:]
context += l + '\n'
continue
if l.startswith('/*'):
lno = consume_comment(lno - 1, lines)
continue
if l.startswith('#'):
lno = consume_ifdef(lno - 1, lines)
continue
if (l == 'namespace detail' or \
l == 'namespace dht_detail' or \
l == 'namespace impl' or \
l == 'namespace aux') \
and not internal:
lno = consume_block(lno, lines)
continue
if 'TORRENT_CFG' in l:
blanks += 1
if verbose: print 'xx %s' % l
continue
if 'TORRENT_DEPRECATED' in l:
if ('class ' in l or 'struct ' in l) and not ';' in l:
lno = consume_block(lno - 1, lines)
context = ''
blanks += 1
if verbose: print 'xx %s' % l
continue
if 'TORRENT_EXPORT ' in l or l.startswith('inline ') or l.startswith('template') or internal:
if l.startswith('class ') or l.startswith('struct '):
if not l.endswith(';'):
current_class, lno = parse_class(lno -1, lines, filename)
if current_class != None and is_visible(context):
current_class['desc'] = context
if context == '':
print 'WARNING: class "%s" is not documented: \x1b[34m%s:%d\x1b[0m' \
% (current_class['name'], filename, lno)
classes.append(current_class)
context = ''
blanks += 1
continue
if looks_like_function(l):
current_fun, lno = parse_function(lno - 1, lines, filename)
if current_fun != None and is_visible(context):
if context == '' and blanks == 0 and len(functions):
functions[-1]['signatures'].update(current_fun['signatures'])
functions[-1]['names'].update(current_fun['names'])
else:
current_fun['desc'] = context
if context == '':
print 'WARNING: function "%s" is not documented: \x1b[34m%s:%d\x1b[0m' \
% (first_item(current_fun['names']), filename, lno)
functions.append(current_fun)
context = ''
blanks = 0
continue
if ('class ' in l or 'struct ' in l) and not ';' in l:
lno = consume_block(lno - 1, lines)
context = ''
blanks += 1
continue
if l.startswith('enum '):
if not is_visible(context):
consume_block(lno - 1, lines)
else:
current_enum, lno = parse_enum(lno - 1, lines, filename)
if current_enum != None and is_visible(context):
current_enum['desc'] = context
if context == '':
print 'WARNING: enum "%s" is not documented: \x1b[34m%s:%d\x1b[0m' \
% (current_enum['name'], filename, lno)
enums.append(current_enum)
context = ''
blanks += 1
continue
blanks += 1
if verbose:
if looks_like_forward_decl(l) \
or looks_like_blank(l) \
or looks_like_namespace(l):
print '-- %s' % l
else:
print '?? %s' % l
context = ''
h.close()
# ====================================================================
#
# RENDER PART
#
# ====================================================================
if dump:
if verbose: print '\n===============================\n'
for c in classes:
print '\x1b[4m%s\x1b[0m %s\n{' % (c['type'], c['name'])
for f in c['fun']:
for s in f['signatures']:
print ' %s' % s.replace('\n', '\n ')
if len(c['fun']) > 0 and len(c['fields']) > 0: print ''
for f in c['fields']:
for s in f['signatures']:
print ' %s' % s
if len(c['fields']) > 0 and len(c['enums']) > 0: print ''
for e in c['enums']:
print ' \x1b[4menum\x1b[0m %s\n {' % e['name']
for v in e['values']:
print ' %s' % v['name']
print ' };'
print '};\n'
for f in functions:
for s in f['signatures']: print '%s' % s
for e in enums:
print '\x1b[4menum\x1b[0m %s\n{' % e['name']
for v in e['values']:
print ' %s' % v['name']
print '};'
categories = {}
for c in classes:
cat = categorize_symbol(c['name'], c['file'])
if not cat in categories:
categories[cat] = { 'classes': [], 'functions': [], 'enums': [], 'filename': 'reference-%s.rst' % cat.replace(' ', '_')}
if c['file'] in overviews:
categories[cat]['overview'] = overviews[c['file']]
filename = categories[cat]['filename'].replace('.rst', '.html') + '#'
categories[cat]['classes'].append(c)
symbols[c['name']] = filename + c['name']
for f in c['fun']:
for n in f['names']:
symbols[n] = filename + n
symbols[c['name'] + '::' + n] = filename + n
for f in c['fields']:
for n in f['names']:
symbols[c['name'] + '::' + n] = filename + n
for e in c['enums']:
symbols[e['name']] = filename + e['name']
symbols[c['name'] + '::' + e['name']] = filename + e['name']
for v in e['values']:
# symbols[v['name']] = filename + v['name']
symbols[e['name'] + '::' + v['name']] = filename + v['name']
symbols[c['name'] + '::' + v['name']] = filename + v['name']
for f in functions:
cat = categorize_symbol(first_item(f['names']), f['file'])
if not cat in categories:
categories[cat] = { 'classes': [], 'functions': [], 'enums': [], 'filename': 'reference-%s.rst' % cat.replace(' ', '_')}
if f['file'] in overviews:
categories[cat]['overview'] = overviews[f['file']]
for n in f['names']:
symbols[n] = categories[cat]['filename'].replace('.rst', '.html') + '#' + n
categories[cat]['functions'].append(f)
for e in enums:
cat = categorize_symbol(e['name'], e['file'])
if not cat in categories:
categories[cat] = { 'classes': [], 'functions': [], 'enums': [], 'filename': 'reference-%s.rst' % cat.replace(' ', '_')}
categories[cat]['enums'].append(e)
filename = categories[cat]['filename'].replace('.rst', '.html') + '#'
symbols[e['name']] = filename + e['name']
for v in e['values']:
symbols[e['name'] + '::' + v['name']] = filename + v['name']
def print_declared_in(out, o):
out.write('Declared in "%s"\n\n' % print_link(o['file'], '../include/%s' % o['file']))
print >>out, dump_link_targets()
# returns RST marked up string
def linkify_symbols(string):
lines = string.split('\n')
ret = []
in_literal = False
lno = 0
for l in lines:
lno += 1
# don't touch headlines, i.e. lines whose
# next line entirely contains one of =, - or .
if (lno < len(lines)-1): next_line = lines[lno]
else: next_line = ''
if len(next_line) > 0 and next_line.replace('=',''). \
replace('-','').replace('.', '') == '':
ret.append(l)
continue
if l.startswith('|'):
ret.append(l)
continue
if in_literal and not l.startswith('\t') and not l == '':
# print ' end literal: "%s"' % l
in_literal = False
if in_literal:
# print ' literal: "%s"' % l
ret.append(l)
continue
if l.strip() == '.. parsed-literal::' or \
l.strip().startswith('.. code::') or \
(not l.strip().startswith('..') and l.endswith('::')):
# print ' start literal: "%s"' % l
in_literal = True
words = l.split(' ')
for i in range(len(words)):
# it's important to preserve leading
# tabs, since that's relevant for
# rst markup
leading = ''
w = words[i]
if len(w) == 0: continue
while len(w) > 0 and \
w[0] in ['\t', ' ', '(', '[', '{']:
leading += w[0]
w = w[1:]
# preserve commas and dots at the end
w = w.strip()
trailing = ''
if len(w) == 0: continue
while len(w) > 1 and w[-1] in ['.', ',', ')'] and w[-2:] != '()':
trailing = w[-1] + trailing
w = w[:-1]
link_name = w
# print w
if len(w) == 0: continue
if link_name[-1] == '_': link_name = link_name[:-1]
if w in symbols:
link_name = link_name.replace('-', ' ')
# print ' found %s -> %s' % (w, link_name)
words[i] = leading + print_link(link_name, symbols[w]) + trailing
ret.append(' '.join(words))
return '\n'.join(ret)
link_targets = []
def print_link(name, target):
global link_targets
link_targets.append(target)
return "`%s`__" % name
def dump_link_targets(indent = ''):
global link_targets
ret = '\n'
for l in link_targets:
ret += '%s__ %s\n' % (indent, l)
link_targets = []
return ret
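# Illustrative pairing of the two helpers above: print_link('peer_info',
# 'reference-Core.html#peer_info') returns '`peer_info`__' for inline use,
# and the queued target is later emitted by dump_link_targets() as
# '__ reference-Core.html#peer_info' (the symbol name is hypothetical).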
def heading(string, c, indent = ''):
string = string.strip()
return '\n' + indent + string + '\n' + indent + (c * len(string)) + '\n'
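# e.g. heading('Core', '=') yields '\nCore\n====\n', an RST section header.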
def render_enums(out, enums, print_declared_reference, header_level):
for e in enums:
print >>out, '.. raw:: html\n'
print >>out, '\t<a name="%s"></a>' % e['name']
print >>out, ''
print >>out, heading('enum %s' % e['name'], header_level)
print_declared_in(out, e)
width = [len('name'), len('value'), len('description')]
for i in range(len(e['values'])):
e['values'][i]['desc'] = linkify_symbols(e['values'][i]['desc'])
for v in e['values']:
width[0] = max(width[0], len(v['name']))
width[1] = max(width[1], len(v['val']))
for d in v['desc'].split('\n'):
width[2] = max(width[2], len(d))
print >>out, '+-' + ('-' * width[0]) + '-+-' + ('-' * width[1]) + '-+-' + ('-' * width[2]) + '-+'
print >>out, '| ' + 'name'.ljust(width[0]) + ' | ' + 'value'.ljust(width[1]) + ' | ' + 'description'.ljust(width[2]) + ' |'
print >>out, '+=' + ('=' * width[0]) + '=+=' + ('=' * width[1]) + '=+=' + ('=' * width[2]) + '=+'
for v in e['values']:
d = v['desc'].split('\n')
if len(d) == 0: d = ['']
print >>out, '| ' + v['name'].ljust(width[0]) + ' | ' + v['val'].ljust(width[1]) + ' | ' + d[0].ljust(width[2]) + ' |'
for s in d[1:]:
print >>out, '| ' + (' ' * width[0]) + ' | ' + (' ' * width[1]) + ' | ' + s.ljust(width[2]) + ' |'
print >>out, '+-' + ('-' * width[0]) + '-+-' + ('-' * width[1]) + '-+-' + ('-' * width[2]) + '-+'
print >>out, ''
print >>out, dump_link_targets()
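# render_enums draws one RST grid table per enum; illustrative output:
# +------+-------+-------------+
# | name | value | description |
# +======+=======+=============+
# | v1   | 0     | first value |
# +------+-------+-------------+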
sections = \
{
'Core': 0,
'Session': 0,
'Settings': 0,
'Bencoding': 1,
'Bdecoding': 1,
'Filter': 1,
'Error Codes': 1,
'Create Torrents': 1,
'ed25519': 2,
'Utility': 2,
'Storage': 2,
'Custom Storage': 2,
'Plugins': 2,
'Alerts': 3
}
def print_toc(out, categories, s):
for cat in categories:
if (s != 2 and cat not in sections) or \
(cat in sections and sections[cat] != s): continue
print >>out, '\t.. rubric:: %s\n' % cat
if 'overview' in categories[cat]:
print >>out, '\t| overview__'
category_filename = categories[cat]['filename'].replace('.rst', '.html')
for c in categories[cat]['classes']:
print >>out, '\t| ' + print_link(c['name'], symbols[c['name']])
for f in categories[cat]['functions']:
for n in f['names']:
print >>out, '\t| ' + print_link(n, symbols[n])
for e in categories[cat]['enums']:
print >>out, '\t| ' + print_link(e['name'], symbols[e['name']])
print >>out, ''
if 'overview' in categories[cat]:
print >>out, '\t__ %s#overview' % categories[cat]['filename'].replace('.rst', '.html')
print >>out, dump_link_targets('\t')
out = open('reference.rst', 'w+')
out.write('''=======================
reference documentation
=======================
''')
out.write('`single-page version`__\n\n__ single-page-ref.html\n\n')
for i in range(4):
out.write('.. container:: main-toc\n\n')
print_toc(out, categories, i)
out.close()
for cat in categories:
out = open(categories[cat]['filename'], 'w+')
classes = categories[cat]['classes']
functions = categories[cat]['functions']
enums = categories[cat]['enums']
out.write('''
:Author: Arvid Norberg, [email protected]
:Version: 1.1.4
`home`__
__ reference.html
%s
.. contents:: Table of contents
:depth: 2
:backlinks: none
''' % heading(cat, '='))
if 'overview' in categories[cat]:
out.write('%s\n' % linkify_symbols(categories[cat]['overview']))
for c in classes:
print >>out, '.. raw:: html\n'
print >>out, '\t<a name="%s"></a>' % c['name']
print >>out, ''
out.write('%s\n' % heading(c['name'], '-'))
print_declared_in(out, c)
c['desc'] = linkify_symbols(c['desc'])
out.write('%s\n' % c['desc'])
print >>out, dump_link_targets()
print >>out,'\n.. parsed-literal::\n\t'
block = '\n%s\n{\n' % c['decl']
for f in c['fun']:
for s in f['signatures']:
block += ' %s\n' % highlight_signature(s.replace('\n', '\n '))
if len(c['fun']) > 0 and len(c['enums']) > 0: block += '\n'
first = True
for e in c['enums']:
if not first:
block += '\n'
first = False
block += ' enum %s\n {\n' % e['name']
for v in e['values']:
block += ' %s,\n' % v['name']
block += ' };\n'
if len(c['fun']) + len(c['enums']) > 0 and len(c['fields']): block += '\n'
for f in c['fields']:
for s in f['signatures']:
block += ' %s\n' % s
block += '};'
print >>out, block.replace('\n', '\n\t') + '\n'
for f in c['fun']:
if f['desc'] == '': continue
title = ''
print >>out, '.. raw:: html\n'
for n in f['names']:
print >>out, '\t<a name="%s"></a>' % n
print >>out, ''
for n in f['names']:
title += '%s ' % n
print >>out, heading(title.strip(), '.')
block = '.. parsed-literal::\n\n'
for s in f['signatures']:
block += highlight_signature(s.replace('\n', '\n ')) + '\n'
print >>out, '%s\n' % block.replace('\n', '\n\t')
f['desc'] = linkify_symbols(f['desc'])
print >>out, '%s' % f['desc']
print >>out, dump_link_targets()
render_enums(out, c['enums'], False, '.')
for f in c['fields']:
if f['desc'] == '': continue
print >>out, '.. raw:: html\n'
for n in f['names']:
print >>out, '\t<a name="%s"></a>' % n
print >>out, ''
for n in f['names']:
print >>out, '%s ' % n,
print >>out, ''
f['desc'] = linkify_symbols(f['desc'])
print >>out, '\t%s' % f['desc'].replace('\n', '\n\t')
print >>out, dump_link_targets()
for f in functions:
h = ''
print >>out, '.. raw:: html\n'
for n in f['names']:
print >>out, '\t<a name="%s"></a>' % n
print >>out, ''
for n in f['names']:
h += '%s ' % n
print >>out, heading(h, '-')
print_declared_in(out, f)
block = '.. parsed-literal::\n\n'
for s in f['signatures']:
block += highlight_signature(s) + '\n'
print >>out, '%s\n' % block.replace('\n', '\n\t')
print >>out, linkify_symbols(f['desc'])
print >>out, dump_link_targets()
render_enums(out, enums, True, '-')
print >>out, dump_link_targets()
for i in static_links:
print >>out, i
out.close()
#for s in symbols:
# print s
for i,o in preprocess_rst.items():
f = open(i, 'r')
out = open(o, 'w+')
print 'processing %s -> %s' % (i, o)
l = linkify_symbols(f.read())
print >>out, l,
print >>out, dump_link_targets()
out.close()
f.close() | functions = [] |
example.py | xml = """<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml"
xmlns:meld="http://www.plope.com/software/meld3"
xmlns:bar="http://foo/bar">
<head>
<meta content="text/html; charset=ISO-8859-1" http-equiv="content-type" />
<title meld:id="title">This is the title</title>
</head>
<body>
<div/> <!-- empty tag -->
<div meld:id="content_well">
<form meld:id="form1" action="." method="POST">
<table border="0" meld:id="table1">
<tbody meld:id="tbody">
<tr>
<th>Name</th>
<th>Description</th>
</tr>
<tr meld:id="tr" class="foo">
<td meld:id="td1">Name</td> | </tbody>
</table>
<input type="submit" name="next" value=" Next "/>
</form>
</div>
</body>
</html>
"""
from meld3 import parse_xmlstring
from meld3 import parse_htmlstring
from StringIO import StringIO
import sys
root = parse_xmlstring(xml)
root.findmeld('title').content('My document')
root.findmeld('form1').attributes(action='./handler')
data = (
{'name':'Boys',
'description':'Ugly'},
{'name':'Girls',
'description':'Pretty'},
)
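# repeat() clones the <tr> element once per item in `data`, yielding each
# clone together with its item so the td cells can be filled in below.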
iterator = root.findmeld('tr').repeat(data)
for element, item in iterator:
element.findmeld('td1').content(item['name'])
element.findmeld('td2').content(item['description'])
root.write_xhtml(sys.stdout) | <td meld:id="td2">Description</td>
</tr> |
create_complete_campaign_google_ads_api_only.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example creates a search campaign with the help of Google Ads API only.
This code example is the last in a series of code examples that shows how to
create a Search campaign using the AdWords API, and then migrate it to the
Google Ads API one functionality at a time. See other examples in this directory
for code examples in various stages of migration.
This code example represents the final state, where all the functionality -
create a campaign budget, a Search campaign, ad groups, keywords and expanded
text ads have been migrated to using the Google Ads API. The AdWords API is not
used.
"""
import argparse
import datetime
import sys
import uuid
from google.ads.google_ads.client import GoogleAdsClient
from google.ads.google_ads.errors import GoogleAdsException
# Number of ads being added/updated in this code example.
NUMBER_OF_ADS = 5
# The list of keywords being added in this code example.
KEYWORDS_TO_ADD = ["mars cruise", "space hotel"]
PAGE_SIZE = 1000
def create_campaign_budget(client, customer_id):
"""Creates a new campaign budget and returns it.
Args:
client: A google.ads.google_ads.client.GoogleAdsClient instance.
customer_id: (str) Customer ID associated with the account.
Returns:
An instance of google.ads.google_ads.v5.types.CampaignBudget for the
newly created Budget.
"""
campaign_service = client.get_service("CampaignBudgetService", version="v5")
operation = client.get_type("CampaignBudgetOperation", version="v5")
budget = operation.create
budget.name = "Interplanetary Cruise Budget #{}".format(uuid.uuid4())
budget.delivery_method = client.get_type(
"BudgetDeliveryMethodEnum", version="v5"
).STANDARD
budget.amount_micros = 500000
response = campaign_service.mutate_campaign_budgets(
customer_id, [operation]
)
campaign_budget_resource_name = response.results[0].resource_name
new_campaign_budget = get_campaign_budget(
client, customer_id, campaign_budget_resource_name
)
print("Added budget named {}".format(new_campaign_budget.name))
return new_campaign_budget
def get_campaign_budget(client, customer_id, resource_name):
"""Retrieves a google.ads.google_ads.v5.types.CampaignBudget instance.
Args:
client: A google.ads.google_ads.client.GoogleAdsClient instance.
customer_id: (str) Customer ID associated with the account.
resource_name: (str) Resource name associated with the newly created
campaign.
Returns:
An instance of google.ads.google_ads.v5.types.CampaignBudget for the
newly created Budget.
"""
ga_service = client.get_service("GoogleAdsService", version="v5")
query = f"""
SELECT
campaign_budget.id,
campaign_budget.name,
campaign_budget.resource_name
FROM campaign_budget
WHERE campaign_budget.resource_name = '{resource_name}'"""
response = ga_service.search(customer_id, query, PAGE_SIZE)
budget = list(response)[0].campaign_budget
return budget
def create_campaign(client, customer_id, campaign_budget):
"""Creates a new campaign and returns it.
Args:
client: A google.ads.google_ads.client.GoogleAdsClient instance.
customer_id: (str) Customer ID associated with the account.
campaign_budget: A google.ads.google_ads.v5.types.CampaignBudget
instance.
Returns:
A google.ads.google_ads.client.GoogleAdsClient message class instance.
"""
operation = client.get_type("CampaignOperation", version="v5")
campaign = operation.create
campaign_service = client.get_service("CampaignService", version="v5")
campaign.name = "Interplanetary Cruise#{}".format(uuid.uuid4())
campaign.advertising_channel_type = client.get_type(
"AdvertisingChannelTypeEnum", version="v5"
).SEARCH
# Recommendation: Set the campaign to PAUSED when creating it to stop the
# ads from immediately serving. Set to ENABLED once you've added
# targeting and the ads are ready to serve.
campaign.status = client.get_type("CampaignStatusEnum", version="v5").PAUSED
campaign.manual_cpc.enhanced_cpc_enabled = True
campaign.campaign_budget = campaign_budget.resource_name
campaign.network_settings.target_google_search = True
campaign.network_settings.target_search_network = True
campaign.network_settings.target_content_network = False
campaign.network_settings.target_partner_search_network = False
campaign.start_date = (
datetime.datetime.now() + datetime.timedelta(1)
).strftime("%Y%m%d")
campaign.end_date = (
datetime.datetime.now() + datetime.timedelta(365)
).strftime("%Y%m%d")
response = campaign_service.mutate_campaigns(customer_id, [operation])
campaign_resource_name = response.results[0].resource_name
new_campaign = get_campaign(client, customer_id, campaign_resource_name)
print("Added campaign named {}".format(new_campaign.name))
return new_campaign
def get_campaign(client, customer_id, campaign_resource_name):
"""Retrieves a google.ads.google_ads.v5.types.Campaign instance.
Args:
client: A google.ads.google_ads.client.GoogleAdsClient instance.
customer_id: (str) Customer ID associated with the account.
campaign_resource_name: (str) Resource name associated with the newly
created campaign budget.
Returns:
A google.ads.google_ads.client.GoogleAdsClient message class instance.
"""
ga_service = client.get_service("GoogleAdsService", version="v5")
query = f"""
SELECT campaign.id, campaign.name, campaign.resource_name
FROM campaign
WHERE campaign.resource_name = '{campaign_resource_name}'"""
response = ga_service.search(customer_id, query, PAGE_SIZE)
campaign = list(response)[0].campaign
return campaign
def create_ad_group(client, customer_id, campaign):
"""Creates a new ad group and returns it.
Args:
client: A google.ads.google_ads.client.GoogleAdsClient instance.
customer_id: (str) Customer ID associated with the account.
campaign: A google.ads.google_ads.v5.types.Campaign instance.
Returns:
An instance of the google.ads.google_ads.v5.types.AdGroup message class
of the newly created ad group.
"""
operation = client.get_type("AdGroupOperation", version="v5")
adgroup = operation.create
adgroup_service = client.get_service("AdGroupService", version="v5")
adgroup.name = "Earth to Mars Cruises #{}".format(uuid.uuid4())
adgroup.campaign = campaign.resource_name
adgroup.status = client.get_type("AdGroupStatusEnum", version="v5").ENABLED
adgroup.type = client.get_type(
"AdGroupTypeEnum", version="v5"
).SEARCH_STANDARD
adgroup.cpc_bid_micros = 10000000
response = adgroup_service.mutate_ad_groups(customer_id, [operation])
ad_group_resource_name = response.results[0].resource_name
ad_group = get_ad_group(client, customer_id, ad_group_resource_name)
print("Added AdGroup named {}".format(ad_group.name))
return ad_group
def get_ad_group(client, customer_id, ad_group_resource_name):
"""Retrieves a google.ads.google_ads.v5.types.AdGroup instance.
Args:
client: A google.ads.google_ads.client.GoogleAdsClient instance.
customer_id: (str) Customer ID associated with the account.
ad_group_resource_name: (str) Resource name associated with the newly
created Ad group.
Returns:
An instance of the google.ads.google_ads.v5.types.AdGroup message class
of the newly created ad group.
"""
ga_service = client.get_service("GoogleAdsService", version="v5")
query = f"""
SELECT ad_group.id, ad_group.name, ad_group.resource_name
FROM ad_group
WHERE ad_group.resource_name = '{ad_group_resource_name}'"""
response = ga_service.search(customer_id, query, PAGE_SIZE)
ad_group = list(response)[0].ad_group
return ad_group
def create_text_ads(client, customer_id, ad_group):
"""Creates new text ads in a given ad group.
Args:
client: A google.ads.google_ads.client.GoogleAdsClient instance.
customer_id: (str) Customer ID associated with the account.
ad_group: A google.ads.google_ads.v5.types.AdGroup instance.
"""
operations = []
for i in range(0, NUMBER_OF_ADS):
operation = client.get_type("AdGroupAdOperation", version="v5")
ad_group_operation = operation.create
ad_group_operation.ad_group = ad_group.resource_name
ad_group_operation.status = client.get_type(
"AdGroupAdStatusEnum", version="v5"
).PAUSED
ad_group_operation.ad.expanded_text_ad.headline_part1 = (
f"Cruise to Mars #{str(uuid.uuid4())[:4]}"
)
ad_group_operation.ad.expanded_text_ad.headline_part2 = (
"Best Space Cruise Line"
)
ad_group_operation.ad.expanded_text_ad.description = (
"Buy your tickets now!"
)
ad_group_operation.ad.final_urls.append("http://www.example.com")
operations.append(operation)
adgroup_service = client.get_service("AdGroupAdService", version="v5")
ad_group_ad_response = adgroup_service.mutate_ad_group_ads(
customer_id, operations
)
new_ad_resource_names = []
for i in range(NUMBER_OF_ADS):
new_ad_resource_names.append(
ad_group_ad_response.results[i].resource_name
)
new_ads = get_ads(client, customer_id, new_ad_resource_names)
for i in range(len(new_ads)):
print(
"Created expanded text ad with ID {}, status {} and "
"headline {}.{}".format(
new_ads[i].ad.id,
new_ads[i].status,
new_ads[i].ad.expanded_text_ad.headline_part1,
new_ads[i].ad.expanded_text_ad.headline_part2,
)
)
def get_ads(client, customer_id, new_ad_resource_names):
"""Retrieves a google.ads.google_ads.v5.types.AdGroupAd instance.
Args:
client: A google.ads.google_ads.client.GoogleAdsClient instanc e.
customer_id: (str) Customer ID associated with the account.
new_ad_resource_names: (str) Resource name associated with the Ad group.
Returns:
An instance of the google.ads.google_ads.v5.types.AdGroupAd message
class of the newly created ad group ad.
"""
def formatter(given_string):
"""Assigns ' ' to names of resources.
This produces a formatted string that can be used within an IN clause.
Args:
given_string: (str) The string to be formatted.
Returns:
The formatted string.
"""
results = []
for i in given_string:
results.append(repr(i))
return ",".join(results)
resource_names = formatter(new_ad_resource_names)
ga_service = client.get_service("GoogleAdsService", version="v5")
query = f"""
SELECT
ad_group_ad.ad.id,
ad_group_ad.ad.expanded_text_ad.headline_part1,
ad_group_ad.ad.expanded_text_ad.headline_part2,
ad_group_ad.status, ad_group_ad.ad.final_urls,
ad_group_ad.resource_name
FROM ad_group_ad
WHERE ad_group_ad.resource_name IN ({resource_names})"""
response = ga_service.search(customer_id, query, PAGE_SIZE)
response = iter(response)
ads = []
while True:
try:
current_row = next(response)
ads.append(current_row.ad_group_ad)
except StopIteration:
break
return ads
def create_keywords(client, customer_id, ad_group, keywords_to_add):
"""Creates new keywords on a given ad group.
Args:
client: A google.ads.google_ads.client.GoogleAdsClient instance.
customer_id: (str) Customer ID associated with the account.
ad_group: A google.ads.google_ads.v5.types.AdGroup instance.
keywords_to_add: (list) A list of keywords to be added to the given
ad group.
"""
ad_group_criterion_operations = []
for keyword in keywords_to_add:
operation = client.get_type("AdGroupCriterionOperation", version="v5")
ad_group_criterion_operation = operation.create
ad_group_criterion_operation.ad_group = ad_group.resource_name
ad_group_criterion_operation.status = client.get_type(
"AdGroupCriterionStatusEnum", version="v5"
).ENABLED
ad_group_criterion_operation.keyword.text = keyword
ad_group_criterion_operation.keyword.match_type = client.get_type(
"KeywordMatchTypeEnum", version="v5"
).EXACT
ad_group_criterion_operations.append(operation)
ad_group_criterion_service_client = client.get_service(
"AdGroupCriterionService", version="v5"
)
ad_group_criterion_response = ad_group_criterion_service_client.mutate_ad_group_criteria(
customer_id, ad_group_criterion_operations
)
new_ad_resource_names = []
for i in range(len(keywords_to_add)):
new_ad_resource_names.append(
ad_group_criterion_response.results[i].resource_name
)
new_keywords = get_keywords(client, customer_id, new_ad_resource_names)
for i in range(len(new_keywords)):
print(
"Keyword with text {}, id = {} and "
"match type {} was created".format(
new_keywords[i].keyword.text,
new_keywords[i].criterion_id,
new_keywords[i].keyword.match_type,
)
)
def get_keywords(client, customer_id, keyword_resource_names):
|
if __name__ == "__main__":
# Initialize client object.
# It will read the config file; the default path is the home directory.
google_ads_client = GoogleAdsClient.load_from_storage()
parser = argparse.ArgumentParser(
description="Lists all campaigns for specified customer."
)
# The following argument(s) should be provided to run the example.
parser.add_argument(
"-c",
"--customer_id",
type=str,
required=True,
help="The Google Ads customer ID.",
)
args = parser.parse_args()
try:
budget = create_campaign_budget(google_ads_client, args.customer_id)
campaign = create_campaign(google_ads_client, args.customer_id, budget)
ad_group = create_ad_group(
google_ads_client, args.customer_id, campaign
)
create_text_ads(google_ads_client, args.customer_id, ad_group)
create_keywords(
google_ads_client, args.customer_id, ad_group, KEYWORDS_TO_ADD
)
except GoogleAdsException as ex:
print(
f"Request with ID '{ex.request_id}' failed with status "
f"'{ex.error.code().name}' and includes the following errors:"
)
for error in ex.failure.errors:
print(f"\tError with message '{error.message}'.")
if error.location:
for field_path_element in error.location.field_path_elements:
print(f"\t\tOn field: {field_path_element.field_name}")
sys.exit(1)
| """Retrieves a google.ads.google_ads.v5.types.AdGroupCriterion instance.
Args:
client: A google.ads.google_ads.client.GoogleAdsClient instance.
customer_id: (str) Customer ID associated with the account.
keyword_resource_names: (list) Resource names associated with the
newly created ad group criteria.
Returns:
An instance of the google.ads.google_ads.v5.types.AdGroupCriterion
message class of the newly created ad group criterion.
"""
def formatter(given_string):
"""Assigns ' ' to names of resources.
This produces a formatted string that can be used within an IN clause.
Args:
given_string: (str) The string to be formatted.
Returns:
The formatted string.
"""
results = []
for i in given_string:
results.append(repr(i))
return ",".join(results)
resource_names = formatter(keyword_resource_names)
ga_service = client.get_service("GoogleAdsService", version="v5")
query = f"""
SELECT
ad_group.id,
ad_group.status,
ad_group_criterion.criterion_id,
ad_group_criterion.keyword.text,
ad_group_criterion.keyword.match_type
FROM ad_group_criterion
WHERE
ad_group_criterion.type = 'KEYWORD'
AND ad_group.status = 'ENABLED'
AND ad_group_criterion.status IN ('ENABLED', 'PAUSED')
AND ad_group_criterion.resource_name IN ({resource_names})"""
response = ga_service.search(customer_id, query, PAGE_SIZE)
response = iter(response)
keywords = []
while True:
try:
current_row = next(response)
keywords.append(current_row.ad_group_criterion)
except StopIteration:
break
return keywords |
regulatingRating.js | import Eth from 'ethjs'
import web3 from 'web3'
import { getProvider } from './provider'
import { getRegulatingRating, getRegulatingRatingView } from '../config'
import store from '../store'
import { wrapSend } from '../utils/utils'
class RegulatingRatingService {
constructor () {
this.address = null
this.account = null
this.rs = null
}
async init () {
/* important to check for provider in
* init function (rather than constructor),
* so that injected web3 has time to load.
*/
this.eth = new Eth(getProvider())
const accounts = await this.eth.accounts()
this.rr = await getRegulatingRating(accounts[0])
this.rrv = await getRegulatingRatingView(accounts[0])
this.address = this.rr.address
this.account = accounts[0]
this.setUpEvents()
wrapSend(this, ['rr'])
store.dispatch({
type: 'REGULATING_RATING_CONTRACT_INIT'
})
}
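/* Typical call sequence (illustrative): a consumer awaits init() once the
* injected provider is available, then uses the read helpers below, e.g.
* `await regulatingRatingService.getBidInfo(hash, id, objs)`. */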
setUpEvents () {
this.rr.allEvents()
.watch((error, log) => {
if (error) {
console.error(error)
return false
}
store.dispatch({
type: 'REGULATING_RATING_EVENT'
})
})
}
async isObjFinalized (hash, id, obj) {
let result = await this.rrv.isObjFinalized.call(hash, id, obj)
return result
}
async getBidInfo (hash, id, objs) {
let result = []
for (let i = 0; i < objs.length; i++) {
result.push(await this.rrv.isRegulatorBid.call(hash, id, objs[i], this.account))
}
return result
}
async bid (name, id, obj) {
await this.rr.bid(web3.utils.keccak256(name), id, obj)
}
async backout (name, id, obj) {
await this.rr.backOutFromBid(web3.utils.keccak256(name), id, obj)
}
async finalizeBidForObj (name, id, obj) {
await this.rr.finalizeBidForObj(web3.utils.keccak256(name), id, obj)
}
async finalizeAllBids (name, id) {
await this.rr.finalizeAllBids(web3.utils.keccak256(name), id)
}
async isRegulator (hash, id, obj) {
let result = await this.rrv.isRegulator.call(hash, id, obj, this.account)
return result
}
async maxScore () {
let result = await this.rr.maxScore.call() | async regulatorVote (name, id, obj, score) {
await this.rr.regulatorVote(web3.utils.keccak256(name), id, obj, score)
}
async getRegulatorVoteInfo (hash, id, obj, addr) {
let result = await this.rrv.getRegulatorVoteInfo.call(hash, id, obj, addr)
return {
weight: result[0],
score: result[1].toNumber()
}
}
async getRegulatorList (hash, id, obj) {
let result = await this.rrv.getObjRegulationInfo.call(hash, id, obj)
return result[2]
}
}
export default new RegulatingRatingService() | return result.toNumber()
}
|
querier_test.go | package keeper
import (
"bytes"
"encoding/hex"
"fmt"
"testing"
"time"
"github.com/cosmos/cosmos-sdk/codec"
sdk "github.com/cosmos/cosmos-sdk/types"
gethcommon "github.com/ethereum/go-ethereum/common"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/onomyprotocol/near-aurora-bridge/module/x/nab/types"
)
//nolint: exhaustivestruct
func TestQueryValsetConfirm(t *testing.T) {
var (
nonce = uint64(1)
myValidatorCosmosAddr, _ = sdk.AccAddressFromBech32("cosmos1ees2tqhhhm9ahlhceh2zdguww9lqn2ckukn86l")
myValidatorEthereumAddr gethcommon.Address = gethcommon.BytesToAddress(bytes.Repeat([]byte{byte(50)}, 20))
)
input := CreateTestEnv(t)
ctx := input.Context
input.GravityKeeper.SetValsetConfirm(ctx, types.MsgValsetConfirm{
Nonce: nonce,
Orchestrator: myValidatorCosmosAddr.String(),
EthAddress: myValidatorEthereumAddr.String(),
Signature: "alksdjhflkasjdfoiasjdfiasjdfoiasdj",
})
specs := map[string]struct {
srcNonce string
srcAddr string
expErr bool
expResp []byte
}{
"all good": {
srcNonce: "1",
srcAddr: myValidatorCosmosAddr.String(),
expResp: []byte(`{"type":"nab/MsgValsetConfirm", "value":{"eth_address":"0x3232323232323232323232323232323232323232", "nonce": "1", "orchestrator": "cosmos1ees2tqhhhm9ahlhceh2zdguww9lqn2ckukn86l", "signature": "alksdjhflkasjdfoiasjdfiasjdfoiasdj"}}`),
},
"unknown nonce": {
srcNonce: "999999",
srcAddr: myValidatorCosmosAddr.String(),
},
"invalid address": {
srcNonce: "1",
srcAddr: "not a valid addr",
expErr: true,
},
"invalid nonce": {
srcNonce: "not a valid nonce",
srcAddr: myValidatorCosmosAddr.String(),
expErr: true,
},
}
for msg, spec := range specs {
t.Run(msg, func(t *testing.T) {
got, err := queryValsetConfirm(ctx, []string{spec.srcNonce, spec.srcAddr}, input.GravityKeeper)
if spec.expErr {
require.Error(t, err)
return
}
require.NoError(t, err)
if spec.expResp == nil {
assert.Nil(t, got)
return
}
assert.JSONEq(t, string(spec.expResp), string(got))
})
}
}
//nolint: exhaustivestruct
func TestAllValsetConfirmsBynonce(t *testing.T) {
input := CreateTestEnv(t)
ctx := input.Context
addrs := []string{
"cosmos1u508cfnsk2nhakv80vdtq3nf558ngyvldkfjj9",
"cosmos1krtcsrxhadj54px0vy6j33pjuzcd3jj8kmsazv",
"cosmos1u94xef3cp9thkcpxecuvhtpwnmg8mhlja8hzkd",
}
// seed confirmations
for i := 0; i < 3; i++ {
addr, _ := sdk.AccAddressFromBech32(addrs[i])
msg := types.MsgValsetConfirm{}
msg.EthAddress = gethcommon.BytesToAddress(bytes.Repeat([]byte{byte(i + 1)}, 20)).String()
msg.Nonce = uint64(1)
msg.Orchestrator = addr.String()
msg.Signature = fmt.Sprintf("signature %d", i+1)
input.GravityKeeper.SetValsetConfirm(ctx, msg)
}
specs := map[string]struct {
srcNonce string
expErr bool
expResp []byte
}{
"all good": {
srcNonce: "1",
expResp: []byte(`[
{"eth_address":"0x0202020202020202020202020202020202020202", "nonce": "1", "orchestrator": "cosmos1krtcsrxhadj54px0vy6j33pjuzcd3jj8kmsazv", "signature": "signature 2"},
{"eth_address":"0x0303030303030303030303030303030303030303", "nonce": "1", "orchestrator": "cosmos1u94xef3cp9thkcpxecuvhtpwnmg8mhlja8hzkd", "signature": "signature 3"},
{"eth_address":"0x0101010101010101010101010101010101010101", "nonce": "1", "orchestrator": "cosmos1u508cfnsk2nhakv80vdtq3nf558ngyvldkfjj9", "signature": "signature 1"}
]`),
},
"unknown nonce": {
srcNonce: "999999",
expResp: nil,
},
"invalid nonce": {
srcNonce: "not a valid nonce",
expErr: true,
},
}
for msg, spec := range specs {
t.Run(msg, func(t *testing.T) {
got, err := queryAllValsetConfirms(ctx, spec.srcNonce, input.GravityKeeper)
if spec.expErr {
require.Error(t, err)
return
}
require.NoError(t, err)
if spec.expResp == nil {
assert.Nil(t, got)
return
}
assert.JSONEq(t, string(spec.expResp), string(got))
})
}
}
// TODO: Check failure modes
//nolint: exhaustivestruct
func TestLastValsetRequests(t *testing.T) {
input := CreateTestEnv(t)
ctx := input.Context
// seed with maxValsetRequestsReturns + 1 requests
for i := 0; i < maxValsetRequestsReturned+1; i++ {
var validators []sdk.ValAddress
for j := 0; j <= i; j++ {
// add a validator each block
valAddr := bytes.Repeat([]byte{byte(j)}, sdk.AddrLen)
input.GravityKeeper.SetEthAddressForValidator(ctx, valAddr, gethcommon.BytesToAddress(bytes.Repeat([]byte{byte(j + 1)}, 20)).String())
validators = append(validators, valAddr)
}
input.GravityKeeper.StakingKeeper = NewStakingKeeperMock(validators...)
ctx = ctx.WithBlockHeight(int64(100 + i))
input.GravityKeeper.SetValsetRequest(ctx)
}
specs := map[string]struct {
expResp []byte
}{ // Expect only maxValsetRequestsReturns back
"limit at 5": {
expResp: []byte(`[
{
"nonce": "6",
"height": "105",
"reward_amount": "0",
"reward_token": "0x0000000000000000000000000000000000000000",
"members": [
{
"power": "715827882",
"ethereum_address": "0x0101010101010101010101010101010101010101"
},
{
"power": "715827882",
"ethereum_address": "0x0202020202020202020202020202020202020202"
},
{
"power": "715827882",
"ethereum_address": "0x0303030303030303030303030303030303030303"
},
{
"power": "715827882",
"ethereum_address": "0x0404040404040404040404040404040404040404"
},
{
"power": "715827882",
"ethereum_address": "0x0505050505050505050505050505050505050505"
},
{
"power": "715827882",
"ethereum_address": "0x0606060606060606060606060606060606060606"
}
]
},
{
"nonce": "5",
"height": "104",
"reward_amount": "0",
"reward_token": "0x0000000000000000000000000000000000000000",
"members": [
{
"power": "858993459",
"ethereum_address": "0x0101010101010101010101010101010101010101"
},
{
"power": "858993459",
"ethereum_address": "0x0202020202020202020202020202020202020202"
},
{
"power": "858993459",
"ethereum_address": "0x0303030303030303030303030303030303030303"
},
{
"power": "858993459",
"ethereum_address": "0x0404040404040404040404040404040404040404"
},
{
"power": "858993459",
"ethereum_address": "0x0505050505050505050505050505050505050505"
}
]
},
{
"nonce": "4",
"height": "103",
"reward_amount": "0",
"reward_token": "0x0000000000000000000000000000000000000000",
"members": [
{
"power": "1073741823",
"ethereum_address": "0x0101010101010101010101010101010101010101"
},
{
"power": "1073741823",
"ethereum_address": "0x0202020202020202020202020202020202020202"
},
{
"power": "1073741823",
"ethereum_address": "0x0303030303030303030303030303030303030303"
},
{
"power": "1073741823",
"ethereum_address": "0x0404040404040404040404040404040404040404"
}
]
},
{
"nonce": "3",
"height": "102",
"reward_amount": "0",
"reward_token": "0x0000000000000000000000000000000000000000",
"members": [
{
"power": "1431655765",
"ethereum_address": "0x0101010101010101010101010101010101010101"
},
{
"power": "1431655765",
"ethereum_address": "0x0202020202020202020202020202020202020202"
},
{
"power": "1431655765",
"ethereum_address": "0x0303030303030303030303030303030303030303"
}
]
},
{
"nonce": "2",
"height": "101",
"reward_amount": "0",
"reward_token": "0x0000000000000000000000000000000000000000",
"members": [
{
"power": "2147483647",
"ethereum_address": "0x0101010101010101010101010101010101010101"
},
{
"power": "2147483647",
"ethereum_address": "0x0202020202020202020202020202020202020202"
}
]
}
]`),
},
}
for msg, spec := range specs {
t.Run(msg, func(t *testing.T) {
got, err := lastValsetRequests(ctx, input.GravityKeeper)
require.NoError(t, err)
assert.JSONEq(t, string(spec.expResp), string(got), string(got))
})
}
}
//nolint: exhaustivestruct
// TODO: check that it doesn't accidentally return a valset that HAS been signed
// Right now it is basically just testing that any valset comes back
func TestPendingValsetRequests(t *testing.T) {
input := CreateTestEnv(t)
ctx := input.Context
// seed with requests
for i := 0; i < 6; i++ {
var validators []sdk.ValAddress
for j := 0; j <= i; j++ {
// add a validator each block
valAddr := bytes.Repeat([]byte{byte(j)}, sdk.AddrLen)
input.GravityKeeper.SetEthAddressForValidator(ctx, valAddr, gethcommon.BytesToAddress(bytes.Repeat([]byte{byte(j + 1)}, 20)).String())
validators = append(validators, valAddr)
}
input.GravityKeeper.StakingKeeper = NewStakingKeeperMock(validators...)
ctx = ctx.WithBlockHeight(int64(100 + i))
input.GravityKeeper.SetValsetRequest(ctx)
}
specs := map[string]struct {
expResp []byte
}{
"find valset": {
expResp: []byte(`[
{
"nonce": "6",
"members": [
{
"power": "715827882",
"ethereum_address": "0x0101010101010101010101010101010101010101"
},
{
"power": "715827882",
"ethereum_address": "0x0202020202020202020202020202020202020202"
},
{
"power": "715827882",
"ethereum_address": "0x0303030303030303030303030303030303030303"
},
{
"power": "715827882",
"ethereum_address": "0x0404040404040404040404040404040404040404"
},
{
"power": "715827882",
"ethereum_address": "0x0505050505050505050505050505050505050505"
},
{
"power": "715827882",
"ethereum_address": "0x0606060606060606060606060606060606060606"
}
],
"height": "105",
"reward_amount": "0",
"reward_token": "0x0000000000000000000000000000000000000000"
},
{
"nonce": "5",
"members": [
{
"power": "858993459",
"ethereum_address": "0x0101010101010101010101010101010101010101"
},
{
"power": "858993459",
"ethereum_address": "0x0202020202020202020202020202020202020202"
},
{
"power": "858993459",
"ethereum_address": "0x0303030303030303030303030303030303030303"
},
{
"power": "858993459",
"ethereum_address": "0x0404040404040404040404040404040404040404"
},
{
"power": "858993459",
"ethereum_address": "0x0505050505050505050505050505050505050505"
}
],
"height": "104",
"reward_amount": "0",
"reward_token": "0x0000000000000000000000000000000000000000"
},
{
"nonce": "4",
"members": [
{
"power": "1073741823",
"ethereum_address": "0x0101010101010101010101010101010101010101"
},
{
"power": "1073741823",
"ethereum_address": "0x0202020202020202020202020202020202020202"
},
{
"power": "1073741823",
"ethereum_address": "0x0303030303030303030303030303030303030303"
},
{
"power": "1073741823",
"ethereum_address": "0x0404040404040404040404040404040404040404"
}
],
"height": "103",
"reward_amount": "0",
"reward_token": "0x0000000000000000000000000000000000000000"
},
{
"nonce": "3",
"members": [
{
"power": "1431655765",
"ethereum_address": "0x0101010101010101010101010101010101010101"
},
{
"power": "1431655765",
"ethereum_address": "0x0202020202020202020202020202020202020202"
},
{
"power": "1431655765",
"ethereum_address": "0x0303030303030303030303030303030303030303"
}
],
"height": "102",
"reward_amount": "0",
"reward_token": "0x0000000000000000000000000000000000000000"
},
{
"nonce": "2",
"members": [
{
"power": "2147483647",
"ethereum_address": "0x0101010101010101010101010101010101010101"
},
{
"power": "2147483647",
"ethereum_address": "0x0202020202020202020202020202020202020202"
}
],
"height": "101",
"reward_amount": "0",
"reward_token": "0x0000000000000000000000000000000000000000"
},
{
"nonce": "1",
"members": [
{
"power": "4294967295",
"ethereum_address": "0x0101010101010101010101010101010101010101"
}
],
"height": "100",
"reward_amount": "0",
"reward_token": "0x0000000000000000000000000000000000000000"
}
]`),
},
}
for msg, spec := range specs {
t.Run(msg, func(t *testing.T) {
var valAddr sdk.AccAddress = bytes.Repeat([]byte{byte(1)}, sdk.AddrLen)
got, err := lastPendingValsetRequest(ctx, valAddr.String(), input.GravityKeeper)
require.NoError(t, err)
assert.JSONEq(t, string(spec.expResp), string(got), string(got))
})
}
}
//nolint: exhaustivestruct
// TODO: check that it actually returns a batch that has NOT been signed, not just any batch
func TestLastPendingBatchRequest(t *testing.T) {
input := CreateTestEnv(t)
ctx := input.Context
// seed with valset requests and eth addresses to make validators
// that we will later use to lookup batches to be signed
for i := 0; i < 6; i++ {
var validators []sdk.ValAddress
for j := 0; j <= i; j++ {
// add a validator each block
// TODO: replace with real SDK addresses
valAddr := bytes.Repeat([]byte{byte(j)}, sdk.AddrLen)
input.GravityKeeper.SetEthAddressForValidator(ctx, valAddr, gethcommon.BytesToAddress(bytes.Repeat([]byte{byte(j + 1)}, 20)).String())
validators = append(validators, valAddr)
}
input.GravityKeeper.StakingKeeper = NewStakingKeeperMock(validators...)
input.GravityKeeper.SetValsetRequest(ctx)
}
createTestBatch(t, input)
specs := map[string]struct {
expResp []byte
}{
"find batch": {
expResp: []byte(`{
"type": "nab/OutgoingTxBatch",
"value": {
"batch_nonce": "1",
"block": "1234567",
"transactions": [
{
"id": "2",
"sender": "cosmos1qyqszqgpqyqszqgpqyqszqgpqyqszqgpjnp7du",
"dest_address": "0x320915BD0F1bad11cBf06e85D5199DBcAC4E9934",
"erc20_token": {
"amount": "101",
"contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
},
"erc20_fee": {
"amount": "3",
"contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
}
},
{
"id": "3",
"sender": "cosmos1qyqszqgpqyqszqgpqyqszqgpqyqszqgpjnp7du",
"dest_address": "0x320915BD0F1bad11cBf06e85D5199DBcAC4E9934",
"erc20_token": {
"amount": "102",
"contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
},
"erc20_fee": {
"amount": "2",
"contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
}
}
],
"token_contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
}
}
`,
)},
}
for msg, spec := range specs {
t.Run(msg, func(t *testing.T) {
var valAddr sdk.AccAddress = bytes.Repeat([]byte{byte(1)}, sdk.AddrLen)
got, err := lastPendingBatchRequest(ctx, valAddr.String(), input.GravityKeeper)
require.NoError(t, err)
assert.JSONEq(t, string(spec.expResp), string(got), string(got))
})
}
}
//nolint: exhaustivestruct
func createTestBatch(t *testing.T, input TestInput) {
var (
mySender = bytes.Repeat([]byte{1}, sdk.AddrLen)
myReceiver = "0x320915BD0F1bad11cBf06e85D5199DBcAC4E9934"
myTokenContractAddr = "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
now = time.Now().UTC()
)
// mint some voucher first
allVouchers := sdk.Coins{types.NewERC20Token(99999, myTokenContractAddr).GravityCoin()}
err := input.BankKeeper.MintCoins(input.Context, types.ModuleName, allVouchers)
require.NoError(t, err)
// set senders balance
input.AccountKeeper.NewAccountWithAddress(input.Context, mySender)
err = input.BankKeeper.SetBalances(input.Context, mySender, allVouchers)
require.NoError(t, err)
// add some TX to the pool
for i, v := range []uint64{2, 3, 2, 1} {
amount := types.NewERC20Token(uint64(i+100), myTokenContractAddr).GravityCoin()
fee := types.NewERC20Token(v, myTokenContractAddr).GravityCoin()
_, err = input.GravityKeeper.AddToOutgoingPool(input.Context, mySender, myReceiver, amount, fee)
require.NoError(t, err)
// Should create:
// 1: amount 100, fee 2
// 2: amount 101, fee 3
// 3: amount 102, fee 2
// 4: amount 103, fee 1
}
// when
input.Context = input.Context.WithBlockTime(now)
// tx batch size is 2, so that some of them stay behind
_, err = input.GravityKeeper.BuildOutgoingTXBatch(input.Context, myTokenContractAddr, 2)
require.NoError(t, err)
// Should have 2 and 3 from above
// 1 and 4 should be unbatched
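// (Batch building picks the highest-fee transactions first, which is
// consistent with ids 2 and 3, fees 3 and 2, filling the size-2 batch.)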
}
//nolint: exhaustivestruct
func | (t *testing.T) {
input := CreateTestEnv(t)
ctx := input.Context
var (
tokenContract = "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
validatorAddr, _ = sdk.AccAddressFromBech32("cosmos1mgamdcs9dah0vn0gqupl05up7pedg2mvupe6hh")
)
input.GravityKeeper.SetBatchConfirm(ctx, &types.MsgConfirmBatch{
Nonce: 1,
TokenContract: tokenContract,
EthSigner: "0xf35e2cc8e6523d683ed44870f5b7cc785051a77d",
Orchestrator: validatorAddr.String(),
Signature: "signature",
})
batchConfirms, err := queryAllBatchConfirms(ctx, "1", tokenContract, input.GravityKeeper)
require.NoError(t, err)
expectedJSON := []byte(`[{"eth_signer":"0xf35e2cc8e6523d683ed44870f5b7cc785051a77d", "nonce":"1", "signature":"signature", "token_contract":"0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B", "orchestrator":"cosmos1mgamdcs9dah0vn0gqupl05up7pedg2mvupe6hh"}]`)
assert.JSONEq(t, string(expectedJSON), string(batchConfirms), "json is equal")
}
//nolint: exhaustivestruct
func TestQueryLogicCalls(t *testing.T) {
input := CreateTestEnv(t)
ctx := input.Context
k := input.GravityKeeper
var (
logicContract = "0x510ab76899430424d209a6c9a5b9951fb8a6f47d"
payload = []byte("fake bytes")
tokenContract = "0x7580bfe88dd3d07947908fae12d95872a260f2d8"
invalidationId = []byte("GravityTesting")
invalidationNonce uint64 = 1
)
// seed with valset requests and eth addresses to make validators
// that we will later use to lookup calls to be signed
for i := 0; i < 6; i++ {
var validators []sdk.ValAddress
for j := 0; j <= i; j++ {
// add a validator each block
// TODO: replace with real SDK addresses
valAddr := bytes.Repeat([]byte{byte(j)}, sdk.AddrLen)
input.GravityKeeper.SetEthAddressForValidator(ctx, valAddr, gethcommon.BytesToAddress(bytes.Repeat([]byte{byte(j + 1)}, 20)).String())
validators = append(validators, valAddr)
}
input.GravityKeeper.StakingKeeper = NewStakingKeeperMock(validators...)
}
token := []*types.ERC20Token{{
Contract: tokenContract,
Amount: sdk.NewIntFromUint64(5000),
}}
call := types.OutgoingLogicCall{
Transfers: token,
Fees: token,
LogicContractAddress: logicContract,
Payload: payload,
Timeout: 10000,
InvalidationId: invalidationId,
InvalidationNonce: uint64(invalidationNonce),
}
k.SetOutgoingLogicCall(ctx, &call)
res := k.GetOutgoingLogicCall(ctx, invalidationId, invalidationNonce)
require.Equal(t, call, *res)
_, err := lastLogicCallRequests(ctx, k)
require.NoError(t, err)
var valAddr sdk.AccAddress = bytes.Repeat([]byte{byte(1)}, sdk.AddrLen)
_, err = lastPendingLogicCallRequest(ctx, valAddr.String(), k)
require.NoError(t, err)
require.NoError(t, err)
}
//nolint: exhaustivestruct
func TestQueryLogicCallsConfirms(t *testing.T) {
input := CreateTestEnv(t)
ctx := input.Context
k := input.GravityKeeper
var (
logicContract = "0x510ab76899430424d209a6c9a5b9951fb8a6f47d"
payload = []byte("fake bytes")
tokenContract = "0x7580bfe88dd3d07947908fae12d95872a260f2d8"
invalidationId = []byte("GravityTesting")
invalidationNonce uint64 = 1
)
// seed with valset requests and eth addresses to make validators
// that we will later use to lookup calls to be signed
for i := 0; i < 6; i++ {
var validators []sdk.ValAddress
for j := 0; j <= i; j++ {
// add a validator each block
// TODO: replace with real SDK addresses
valAddr := bytes.Repeat([]byte{byte(j)}, sdk.AddrLen)
input.GravityKeeper.SetEthAddressForValidator(ctx, valAddr, gethcommon.BytesToAddress(bytes.Repeat([]byte{byte(j + 1)}, 20)).String())
validators = append(validators, valAddr)
}
input.GravityKeeper.StakingKeeper = NewStakingKeeperMock(validators...)
}
token := []*types.ERC20Token{{
Contract: tokenContract,
Amount: sdk.NewIntFromUint64(5000),
}}
call := types.OutgoingLogicCall{
Transfers: token,
Fees: token,
LogicContractAddress: logicContract,
Payload: payload,
Timeout: 10000,
InvalidationId: invalidationId,
InvalidationNonce: uint64(invalidationNonce),
}
k.SetOutgoingLogicCall(ctx, &call)
var valAddr sdk.AccAddress = bytes.Repeat([]byte{byte(1)}, sdk.AddrLen)
confirm := types.MsgConfirmLogicCall{
InvalidationId: hex.EncodeToString(invalidationId),
InvalidationNonce: 1,
EthSigner: "test",
Orchestrator: valAddr.String(),
Signature: "test",
}
k.SetLogicCallConfirm(ctx, &confirm)
res := k.GetLogicConfirmByInvalidationIDAndNonce(ctx, invalidationId, 1)
assert.Equal(t, len(res), 1)
}
//nolint: exhaustivestruct
// TODO: test that it gets the correct batch, not just any batch.
// Check with multiple nonces and tokenContracts
func TestQueryBatch(t *testing.T) {
input := CreateTestEnv(t)
ctx := input.Context
var (
tokenContract = "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
)
createTestBatch(t, input)
batch, err := queryBatch(ctx, "1", tokenContract, input.GravityKeeper)
require.NoError(t, err)
expectedJSON := []byte(`{
"type": "nab/OutgoingTxBatch",
"value": {
"transactions": [
{
"erc20_fee": {
"amount": "3",
"contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
},
"dest_address": "0x320915BD0F1bad11cBf06e85D5199DBcAC4E9934",
"erc20_token": {
"amount": "101",
"contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
},
"sender": "cosmos1qyqszqgpqyqszqgpqyqszqgpqyqszqgpjnp7du",
"id": "2"
},
{
"erc20_fee": {
"amount": "2",
"contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
},
"dest_address": "0x320915BD0F1bad11cBf06e85D5199DBcAC4E9934",
"erc20_token": {
"amount": "102",
"contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
},
"sender": "cosmos1qyqszqgpqyqszqgpqyqszqgpqyqszqgpjnp7du",
"id": "3"
}
],
"batch_nonce": "1",
"block": "1234567",
"token_contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
}
}
`)
// TODO: this test is failing on the empty representation of valset members
assert.JSONEq(t, string(expectedJSON), string(batch), string(batch))
}
//nolint: exhaustivestruct
func TestLastBatchesRequest(t *testing.T) {
input := CreateTestEnv(t)
ctx := input.Context
createTestBatch(t, input)
createTestBatch(t, input)
lastBatches, err := lastBatchesRequest(ctx, input.GravityKeeper)
require.NoError(t, err)
expectedJSON := []byte(`[
{
"transactions": [
{
"erc20_fee": {
"amount": "3",
"contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
},
"dest_address": "0x320915BD0F1bad11cBf06e85D5199DBcAC4E9934",
"erc20_token": {
"amount": "101",
"contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
},
"sender": "cosmos1qyqszqgpqyqszqgpqyqszqgpqyqszqgpjnp7du",
"id": "6"
},
{
"erc20_fee": {
"amount": "2",
"contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
},
"dest_address": "0x320915BD0F1bad11cBf06e85D5199DBcAC4E9934",
"erc20_token": {
"amount": "102",
"contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
},
"sender": "cosmos1qyqszqgpqyqszqgpqyqszqgpqyqszqgpjnp7du",
"id": "7"
}
],
"batch_nonce": "2",
"block": "1234567",
"token_contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
},
{
"transactions": [
{
"erc20_fee": {
"amount": "3",
"contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
},
"dest_address": "0x320915BD0F1bad11cBf06e85D5199DBcAC4E9934",
"erc20_token": {
"amount": "101",
"contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
},
"sender": "cosmos1qyqszqgpqyqszqgpqyqszqgpqyqszqgpjnp7du",
"id": "2"
},
{
"erc20_fee": {
"amount": "2",
"contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
},
"dest_address": "0x320915BD0F1bad11cBf06e85D5199DBcAC4E9934",
"erc20_token": {
"amount": "102",
"contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
},
"sender": "cosmos1qyqszqgpqyqszqgpqyqszqgpqyqszqgpjnp7du",
"id": "3"
}
],
"batch_nonce": "1",
"block": "1234567",
"token_contract": "0xAb5801a7D398351b8bE11C439e05C5B3259aeC9B"
}
]
`)
assert.JSONEq(t, string(expectedJSON), string(lastBatches), "json is equal")
}
//nolint: exhaustivestruct
// tests setting and querying eth address and orchestrator addresses
func TestQueryCurrentValset(t *testing.T) {
var (
ethAddress = "0xb462864E395d88d6bc7C5dd5F3F5eb4cc2599255"
valAddress sdk.ValAddress = bytes.Repeat([]byte{0x2}, sdk.AddrLen)
)
input := CreateTestEnv(t)
input.GravityKeeper.StakingKeeper = NewStakingKeeperMock(valAddress)
ctx := input.Context
input.GravityKeeper.SetEthAddressForValidator(ctx, valAddress, ethAddress)
currentValset := input.GravityKeeper.GetCurrentValset(ctx)
bridgeVal := types.BridgeValidator{EthereumAddress: ethAddress, Power: 4294967295}
expectedValset := types.NewValset(1, 1234567, []*types.BridgeValidator{&bridgeVal}, sdk.NewIntFromUint64(0), "0x0000000000000000000000000000000000000000")
assert.Equal(t, expectedValset, currentValset)
}
//nolint: exhaustivestruct
func TestQueryERC20ToDenom(t *testing.T) {
var (
erc20 = "0xb462864E395d88d6bc7C5dd5F3F5eb4cc2599255"
denom = "uatom"
)
response := types.QueryERC20ToDenomResponse{
Denom: denom,
CosmosOriginated: true,
}
input := CreateTestEnv(t)
ctx := input.Context
input.GravityKeeper.setCosmosOriginatedDenomToERC20(ctx, denom, erc20)
queriedDenom, err := queryERC20ToDenom(ctx, erc20, input.GravityKeeper)
require.NoError(t, err)
correctBytes, err := codec.MarshalJSONIndent(types.ModuleCdc, response)
require.NoError(t, err)
assert.Equal(t, correctBytes, queriedDenom)
}
//nolint: exhaustivestruct
func TestQueryDenomToERC20(t *testing.T) {
var (
erc20 = "0xb462864E395d88d6bc7C5dd5F3F5eb4cc2599255"
denom = "uatom"
)
response := types.QueryDenomToERC20Response{
Erc20: erc20,
CosmosOriginated: true,
}
input := CreateTestEnv(t)
ctx := input.Context
input.GravityKeeper.setCosmosOriginatedDenomToERC20(ctx, denom, erc20)
queriedERC20, err := queryDenomToERC20(ctx, denom, input.GravityKeeper)
require.NoError(t, err)
correctBytes, err := codec.MarshalJSONIndent(types.ModuleCdc, response)
require.NoError(t, err)
assert.Equal(t, correctBytes, queriedERC20)
}
//nolint: exhaustivestruct
func TestQueryPendingSendToEth(t *testing.T) {
input := CreateTestEnv(t)
ctx := input.Context
var (
now = time.Now().UTC()
mySender, _ = sdk.AccAddressFromBech32("cosmos1ahx7f8wyertuus9r20284ej0asrs085case3kn")
myReceiver = "0xd041c41EA1bf0F006ADBb6d2c9ef9D425dE5eaD7"
myTokenContractAddr = "0x429881672B9AE42b8EbA0E26cD9C73711b891Ca5" // Pickle
allVouchers = sdk.NewCoins(
types.NewERC20Token(99999, myTokenContractAddr).GravityCoin(),
)
)
// mint some voucher first
require.NoError(t, input.BankKeeper.MintCoins(ctx, types.ModuleName, allVouchers))
// set senders balance
input.AccountKeeper.NewAccountWithAddress(ctx, mySender)
require.NoError(t, input.BankKeeper.SetBalances(ctx, mySender, allVouchers))
// CREATE FIRST BATCH
// ==================
// add some TX to the pool
for i, v := range []uint64{2, 3, 2, 1} {
amount := types.NewERC20Token(uint64(i+100), myTokenContractAddr).GravityCoin()
fee := types.NewERC20Token(v, myTokenContractAddr).GravityCoin()
_, err := input.GravityKeeper.AddToOutgoingPool(ctx, mySender, myReceiver, amount, fee)
require.NoError(t, err)
// Should create:
// 1: amount 100, fee 2
// 2: amount 101, fee 3
// 3: amount 102, fee 2
// 4: amount 103, fee 1
}
// when
ctx = ctx.WithBlockTime(now)
// tx batch size is 2, so that some of them stay behind
// Should contain 2 and 3 from above
_, err := input.GravityKeeper.BuildOutgoingTXBatch(ctx, myTokenContractAddr, 2)
require.NoError(t, err)
// Should receive 1 and 4 unbatched, 2 and 3 batched in response
response, err := queryPendingSendToEth(ctx, mySender.String(), input.GravityKeeper)
require.NoError(t, err)
expectedJSON := []byte(`{
"transfers_in_batches": [
{
"id": "2",
"sender": "cosmos1ahx7f8wyertuus9r20284ej0asrs085case3kn",
"dest_address": "0xd041c41EA1bf0F006ADBb6d2c9ef9D425dE5eaD7",
"erc20_token": {
"contract": "0x429881672B9AE42b8EbA0E26cD9C73711b891Ca5",
"amount": "101"
},
"erc20_fee": {
"contract": "0x429881672B9AE42b8EbA0E26cD9C73711b891Ca5",
"amount": "3"
}
},
{
"id": "3",
"sender": "cosmos1ahx7f8wyertuus9r20284ej0asrs085case3kn",
"dest_address": "0xd041c41EA1bf0F006ADBb6d2c9ef9D425dE5eaD7",
"erc20_token": {
"contract": "0x429881672B9AE42b8EbA0E26cD9C73711b891Ca5",
"amount": "102"
},
"erc20_fee": {
"contract": "0x429881672B9AE42b8EbA0E26cD9C73711b891Ca5",
"amount": "2"
}
}
],
"unbatched_transfers": [
{
"id": "1",
"sender": "cosmos1ahx7f8wyertuus9r20284ej0asrs085case3kn",
"dest_address": "0xd041c41EA1bf0F006ADBb6d2c9ef9D425dE5eaD7",
"erc20_token": {
"contract": "0x429881672B9AE42b8EbA0E26cD9C73711b891Ca5",
"amount": "100"
},
"erc20_fee": {
"contract": "0x429881672B9AE42b8EbA0E26cD9C73711b891Ca5",
"amount": "2"
}
},
{
"id": "4",
"sender": "cosmos1ahx7f8wyertuus9r20284ej0asrs085case3kn",
"dest_address": "0xd041c41EA1bf0F006ADBb6d2c9ef9D425dE5eaD7",
"erc20_token": {
"contract": "0x429881672B9AE42b8EbA0E26cD9C73711b891Ca5",
"amount": "103"
},
"erc20_fee": {
"contract": "0x429881672B9AE42b8EbA0E26cD9C73711b891Ca5",
"amount": "1"
}
}
]}
`)
assert.JSONEq(t, string(expectedJSON), string(response), "json is equal")
}
| TestQueryAllBatchConfirms |
unit_tests.rs | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
errors::{JsonRpcError, ServerCode},
tests::{
genesis::generate_genesis_state,
utils::{test_bootstrap, MockLibraDB},
},
};
use futures::{
channel::{
mpsc::{channel, Receiver},
oneshot,
},
StreamExt,
};
use libra_config::utils;
use libra_crypto::{ed25519::Ed25519PrivateKey, hash::CryptoHash, HashValue, PrivateKey, Uniform};
use libra_json_rpc_client::{
views::{
AccountStateWithProofView, AccountView, BlockMetadata, BytesView, EventView,
StateProofView, TransactionDataView, TransactionView, VMStatusView,
},
JsonRpcAsyncClient, JsonRpcBatch, JsonRpcResponse, ResponseAsView,
};
use libra_mempool::SubmissionStatus;
use libra_proptest_helpers::ValueGenerator;
use libra_types::{
account_address::AccountAddress,
account_config::{
from_currency_code_string, libra_root_address, testnet_dd_account_address, AccountResource,
FreezingBit, LBR_NAME,
},
account_state::AccountState,
account_state_blob::{AccountStateBlob, AccountStateWithProof},
chain_id::ChainId,
contract_event::ContractEvent,
event::EventKey,
ledger_info::LedgerInfoWithSignatures,
mempool_status::{MempoolStatus, MempoolStatusCode},
proof::{SparseMerkleProof, TransactionAccumulatorProof, TransactionInfoWithProof},
test_helpers::transaction_test_helpers::get_test_signed_txn,
transaction::{SignedTransaction, Transaction, TransactionInfo, TransactionPayload},
vm_status::StatusCode,
};
use libradb::test_helper::arb_blocks_to_commit;
use move_core_types::{
language_storage::TypeTag,
move_resource::MoveResource,
value::{MoveStructLayout, MoveTypeLayout},
};
use move_vm_types::values::{Struct, Value};
use proptest::prelude::*;
use std::{
cmp::{max, min},
collections::HashMap,
convert::TryFrom,
str::FromStr,
sync::Arc,
};
use storage_interface::DbReader;
use tokio::runtime::Runtime;
use vm_validator::{
mocks::mock_vm_validator::MockVMValidator, vm_validator::TransactionValidation,
};
use serde_json::json;
// Returns a MockLibraDB for unit testing.
fn mock_db() -> MockLibraDB {
let mut gen = ValueGenerator::new();
let blocks = gen.generate(arb_blocks_to_commit());
let mut account_state_with_proof = gen.generate(any::<AccountStateWithProof>());
let mut version = 1;
let mut all_accounts = HashMap::new();
let mut all_txns = vec![];
let mut events = vec![];
let mut timestamps = vec![0 as u64];
for (txns_to_commit, ledger_info_with_sigs) in &blocks {
for (idx, txn) in txns_to_commit.iter().enumerate() {
timestamps.push(ledger_info_with_sigs.ledger_info().timestamp_usecs());
events.extend(
txn.events()
.iter()
.map(|e| ((idx + version) as u64, e.clone())),
);
}
version += txns_to_commit.len();
let mut account_states = HashMap::new();
// Get the ground truth of account states.
txns_to_commit.iter().for_each(|txn_to_commit| {
account_states.extend(txn_to_commit.account_states().clone())
});
// Record all account states.
for (address, blob) in account_states.into_iter() {
let mut state = AccountState::try_from(&blob).unwrap();
let freezing_bit = Value::struct_(Struct::pack(vec![Value::bool(false)], true))
.value_as::<Struct>()
.unwrap()
.simple_serialize(&MoveStructLayout::new(vec![MoveTypeLayout::Bool]))
.unwrap();
state.insert(FreezingBit::resource_path(), freezing_bit);
all_accounts.insert(address, AccountStateBlob::try_from(&state).unwrap());
}
// Record all transactions.
all_txns.extend(txns_to_commit.iter().map(|txn_to_commit| {
(
txn_to_commit.transaction().clone(),
txn_to_commit.status().clone(),
)
}));
}
if account_state_with_proof.blob.is_none() {
let (_, blob) = all_accounts.iter().next().unwrap();
account_state_with_proof.blob = Some(blob.clone());
}
let account_state_with_proof = vec![account_state_with_proof];
if events.is_empty() {
// mock the first event
let mock_event = ContractEvent::new(
EventKey::new_from_address(&AccountAddress::random(), 0),
0,
TypeTag::Bool,
b"event_data".to_vec(),
);
events.push((version as u64, mock_event));
}
let (genesis, _) = generate_genesis_state();
MockLibraDB {
version: version as u64,
genesis,
all_accounts,
all_txns,
events,
account_state_with_proof,
timestamps,
}
}
#[test]
fn test_json_rpc_url() {
let (_mock_db, _runtime, url, _) = create_db_and_runtime();
let client = reqwest::blocking::Client::new();
// check that only root path is accessible
let fake_path = format!("{}/fake_path", url);
let resp = client.get(&fake_path).send().unwrap();
assert_eq!(resp.status(), 404);
// only post method is allowed
let resp = client.get(&url).send().unwrap();
assert_eq!(resp.status(), 405, "{}", url);
// empty payload is not allowed
let resp = client.post(&url).send().unwrap();
assert_eq!(resp.status(), 400);
// For now /v1 and / are both supported
let url_v1 = format!("{}/v1", url);
let resp = client.post(&url_v1).send().unwrap();
assert_eq!(resp.status(), 400);
let resp = client.post(&url).body("non json").send().unwrap();
assert_eq!(resp.status(), 400);
}
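// Illustrative only (a sketch matching the assertions above, with a
// hypothetical port): the same status codes can be observed by hand, e.g.
//
//   curl -i http://127.0.0.1:<port>/fake_path     -> 404 Not Found
//   curl -i http://127.0.0.1:<port>/              -> 405 Method Not Allowed
//   curl -i -X POST http://127.0.0.1:<port>/      -> 400 Bad Request (empty body)
//   curl -i -X POST http://127.0.0.1:<port>/v1    -> 400 Bad Request (empty body)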
#[test]
fn test_json_rpc_protocol_invalid_requests() {
let (mock_db, _runtime, url, _) = create_db_and_runtime();
let client = reqwest::blocking::Client::new();
let version = mock_db.version;
let timestamp = mock_db.get_block_timestamp(version).unwrap();
let calls = vec![
(
"invalid protocol version",
json!({"jsonrpc": "1.0", "method": "get_metadata", "params": [], "id": 1}),
json!({
"error": {
"code": -32600, "data": null, "message": "Invalid Request",
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"invalid request format: invalid id type",
json!({"jsonrpc": "2.0", "method": "get_metadata", "params": [], "id": true}),
json!({
"error": {
"code": -32604, "data": null, "message": "Invalid request format",
},
"id": null,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"method not found",
json!({"jsonrpc": "2.0", "method": "add", "params": [], "id": 1}),
json!({
"error": {
"code": -32601, "data": null, "message": "Method not found",
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"invalid arguments: too many arguments",
json!({"jsonrpc": "2.0", "method": "get_account", "params": [1, 2], "id": 1}), | json!({
"error": {
"code": -32602,
"message": "Invalid params: wrong number of arguments (given 2, expected 1)",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"invalid arguments: not enough arguments",
json!({"jsonrpc": "2.0", "method": "get_account", "params": [], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid params: wrong number of arguments (given 0, expected 1)",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"invalid arguments: too many arguments for a method has optional arguments",
json!({"jsonrpc": "2.0", "method": "get_metadata", "params": [1, 2], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid params: wrong number of arguments (given 2, expected 0..1)",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_account invalid param data type",
json!({"jsonrpc": "2.0", "method": "get_account", "params": [false], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param account address(params[0]): should be hex-encoded string",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_account invalid account address str",
json!({"jsonrpc": "2.0", "method": "get_account", "params": ["helloworld"], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param account address(params[0]): should be hex-encoded string",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"submit invalid data",
json!({"jsonrpc": "2.0", "method": "submit", "params": ["helloworld"], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param data(params[0]): should be hex-encoded string of LCS serialized Libra SignedTransaction type",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_transactions: invalid start_version param",
json!({"jsonrpc": "2.0", "method": "get_transactions", "params": ["helloworld", 1, true], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param start_version(params[0]): should be unsigned int64",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_transactions: invalid limit param",
json!({"jsonrpc": "2.0", "method": "get_transactions", "params": [1, false, true], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param limit(params[1]): should be unsigned int64",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_transactions: invalid include_events param",
json!({"jsonrpc": "2.0", "method": "get_transactions", "params": [1, 10, "true"], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param include_events(params[2]): should be boolean",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_events: invalid event_key type",
json!({"jsonrpc": "2.0", "method": "get_events", "params": [false, 1, 10], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param event key(params[0]): should be hex-encoded string",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_events: event_key is not hex-encoded string",
json!({"jsonrpc": "2.0", "method": "get_events", "params": ["helloworld", 1, 10], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param event key(params[0]): should be hex-encoded string",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_events: invalid start param",
json!({"jsonrpc": "2.0", "method": "get_events", "params": ["13000000000000000000000000000000000000000a550c18", false, 1], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param start(params[1]): should be unsigned int64",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_events: invalid limit param",
json!({"jsonrpc": "2.0", "method": "get_events", "params": ["13000000000000000000000000000000000000000a550c18", 1, "invalid"], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param limit(params[2]): should be unsigned int64",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_account_transaction: invalid account",
json!({"jsonrpc": "2.0", "method": "get_account_transaction", "params": ["invalid", 1, false], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param account address(params[0]): should be hex-encoded string",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_account_transaction: invalid sequence",
json!({"jsonrpc": "2.0", "method": "get_account_transaction", "params": ["e1b3d22871989e9fd9dc6814b2f4fc41", false, false], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param account sequence number(params[1]): should be unsigned int64",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_account_transaction: invalid include_event",
json!({"jsonrpc": "2.0", "method": "get_account_transaction", "params": ["e1b3d22871989e9fd9dc6814b2f4fc41", 1, "false"], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param include_events(params[2]): should be boolean",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_account_transactions: invalid account",
json!({"jsonrpc": "2.0", "method": "get_account_transactions", "params": ["invalid", 1, 2, false], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param account address(params[0]): should be hex-encoded string",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_account_transactions: invalid start param",
json!({"jsonrpc": "2.0", "method": "get_account_transactions", "params": ["e1b3d22871989e9fd9dc6814b2f4fc41", false, 2, false], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param start(params[1]): should be unsigned int64",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_account_transactions: invalid limit param",
json!({"jsonrpc": "2.0", "method": "get_account_transactions", "params": ["e1b3d22871989e9fd9dc6814b2f4fc41", 1, "invalid", false], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param limit(params[2]): should be unsigned int64",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_account_transactions: invalid include_event",
json!({"jsonrpc": "2.0", "method": "get_account_transactions", "params": ["e1b3d22871989e9fd9dc6814b2f4fc41", 1, 5, "false"], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param include_events(params[3]): should be boolean",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_state_proof: invalid known_version",
json!({"jsonrpc": "2.0", "method": "get_state_proof", "params": ["invalid"], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param known version(params[0]): should be unsigned int64",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
(
"get_account_state_with_proof: invalid account address",
json!({"jsonrpc": "2.0", "method": "get_account_state_with_proof", "params": ["invalid", 1, 1], "id": 1}),
json!({
"error": {
"code": -32602,
"message": "Invalid param account address(params[0]): should be hex-encoded string",
"data": null
},
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version
}),
),
];
for (name, request, expected) in calls {
let resp = client.post(&url).json(&request).send().unwrap();
assert_eq!(resp.status(), 200);
let resp_json: serde_json::Value = resp.json().unwrap();
assert_eq!(expected, resp_json, "test: {}", name);
}
}
#[test]
fn test_json_rpc_protocol() {
let (mock_db, _runtime, url, _) = create_db_and_runtime();
let version = mock_db.version;
let timestamp = mock_db.get_block_timestamp(version).unwrap();
let calls = vec![
(
"get_currencies",
json!({"jsonrpc": "2.0", "method": "get_currencies", "params": [], "id": 1}),
json!({
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version,
"result": [
{
"burn_events_key": "02000000000000000000000000000000000000000a550c18",
"cancel_burn_events_key": "04000000000000000000000000000000000000000a550c18",
"code": "Coin1",
"exchange_rate_update_events_key": "05000000000000000000000000000000000000000a550c18",
"fractional_part": 100,
"mint_events_key": "01000000000000000000000000000000000000000a550c18",
"preburn_events_key": "03000000000000000000000000000000000000000a550c18",
"scaling_factor": 1000000,
"to_lbr_exchange_rate": 0.5
},
{
"burn_events_key": "07000000000000000000000000000000000000000a550c18",
"cancel_burn_events_key": "09000000000000000000000000000000000000000a550c18",
"code": "Coin2",
"exchange_rate_update_events_key": "0a000000000000000000000000000000000000000a550c18",
"fractional_part": 100,
"mint_events_key": "06000000000000000000000000000000000000000a550c18",
"preburn_events_key": "08000000000000000000000000000000000000000a550c18",
"scaling_factor": 1000000,
"to_lbr_exchange_rate": 0.5
},
{
"burn_events_key": "0c000000000000000000000000000000000000000a550c18",
"cancel_burn_events_key": "0e000000000000000000000000000000000000000a550c18",
"code": "LBR",
"exchange_rate_update_events_key": "0f000000000000000000000000000000000000000a550c18",
"fractional_part": 1000,
"mint_events_key": "0b000000000000000000000000000000000000000a550c18",
"preburn_events_key": "0d000000000000000000000000000000000000000a550c18",
"scaling_factor": 1000000,
"to_lbr_exchange_rate": 1.0
}
]
}),
),
(
"get_metadata without version parameter",
json!({"jsonrpc": "2.0", "method": "get_metadata", "params": [], "id": 1}),
json!({
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version,
"result": {
"timestamp": timestamp,
"version": version
}
}),
),
(
"get_metadata with version",
json!({"jsonrpc": "2.0", "method": "get_metadata", "params": [0], "id": 1}),
json!({
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version,
"result": {
"timestamp": 0,
"version": 0
}
}),
),
(
"get_account: root account",
json!({"jsonrpc": "2.0", "method": "get_account", "params": [libra_root_address().to_string()], "id": 1}),
json!({
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version,
"result": {
"authentication_key": "1304972f9242cbc3528a1e286323471ab891baa37e0053b85651693a79854a00",
"balances": [],
"delegated_key_rotation_capability": false,
"delegated_withdrawal_capability": false,
"is_frozen": false,
"received_events_key": "12000000000000000000000000000000000000000a550c18",
"role": "unknown",
"sent_events_key": "13000000000000000000000000000000000000000a550c18",
"sequence_number": 1
}
}),
),
(
"get_account: testnet dd account",
json!({"jsonrpc": "2.0", "method": "get_account", "params": [testnet_dd_account_address().to_string()], "id": 1}),
json!({
"id": 1,
"jsonrpc": "2.0",
"libra_chain_id": ChainId::test().id(),
"libra_ledger_timestampusec": timestamp,
"libra_ledger_version": version,
"result": {
"authentication_key": "1304972f9242cbc3528a1e286323471ab891baa37e0053b85651693a79854a00",
"balances": [
{
"amount": 4611686018427387903 as u64,
"currency": "Coin1"
},
{
"amount": 4611686018427387903 as u64,
"currency": "Coin2"
},
{
"amount": 9223372036854775807 as u64,
"currency": "LBR"
}
],
"delegated_key_rotation_capability": false,
"delegated_withdrawal_capability": false,
"is_frozen": false,
"received_events_key": "0100000000000000000000000000000000000000000000dd",
"role": {
"designated_dealer": {
"base_url": "https://libra.org",
"compliance_key": "f66bf0ce5ceb582b93d6780820c2025b9967aedaa259bdbb9f3d0297eced0e18",
"expiration_time": 18446744073709551615 as u64,
"human_name": "moneybags",
"preburn_balances": [
{
"amount": 0,
"currency": "Coin1"
},
{
"amount": 0,
"currency": "Coin2"
}
],
"received_mint_events_key": "0000000000000000000000000000000000000000000000dd"
}
},
"sent_events_key": "0200000000000000000000000000000000000000000000dd",
"sequence_number": 0
}
}),
),
];
let client = reqwest::blocking::Client::new();
for (name, request, expected) in calls {
let resp = client.post(&url).json(&request).send().unwrap();
assert_eq!(resp.status(), 200);
let resp_json: serde_json::Value = resp.json().unwrap();
assert_eq!(expected, resp_json, "test: {}", name);
}
}
#[test]
fn test_transaction_submission() {
let (mp_sender, mut mp_events) = channel(1);
let mock_db = mock_db();
let port = utils::get_available_port();
let address = format!("0.0.0.0:{}", port);
let mut runtime = test_bootstrap(address.parse().unwrap(), Arc::new(mock_db), mp_sender);
let client = JsonRpcAsyncClient::new(
reqwest::Url::from_str(format!("http://{}:{}/v1", "127.0.0.1", port).as_str())
.expect("invalid url"),
);
// future that mocks shared mempool execution
runtime.spawn(async move {
let validator = MockVMValidator;
while let Some((txn, cb)) = mp_events.next().await {
let vm_status = validator.validate_transaction(txn).unwrap().status();
let result = if vm_status.is_some() {
(MempoolStatus::new(MempoolStatusCode::VmError), vm_status)
} else {
(MempoolStatus::new(MempoolStatusCode::Accepted), None)
};
cb.send(Ok(result)).unwrap();
}
});
// closure that checks transaction submission for given account
let mut txn_submission = move |sender| {
let privkey = Ed25519PrivateKey::generate_for_testing();
let txn = get_test_signed_txn(sender, 0, &privkey, privkey.public_key(), None);
let mut batch = JsonRpcBatch::default();
batch.add_submit_request(txn).unwrap();
runtime.block_on(client.execute(batch)).unwrap()
};
// check successful submission
let sender = AccountAddress::new([9; AccountAddress::LENGTH]);
assert!(txn_submission(sender)[0].as_ref().unwrap() == &JsonRpcResponse::SubmissionResponse);
// check vm error submission
let sender = AccountAddress::new([0; AccountAddress::LENGTH]);
let response = &txn_submission(sender)[0];
if let Err(e) = response {
if let Some(error) = e.downcast_ref::<JsonRpcError>() {
assert_eq!(error.code, ServerCode::VmValidationError as i16);
let status_code: StatusCode = error.as_status_code().unwrap();
assert_eq!(status_code, StatusCode::SENDING_ACCOUNT_DOES_NOT_EXIST);
} else {
panic!("unexpected error format");
}
} else {
panic!("expected error");
}
}
#[test]
fn test_get_account() {
let (mock_db, client, mut runtime) = create_database_client_and_runtime();
// test case 1: single call
let (first_account, blob) = mock_db.all_accounts.iter().next().unwrap();
let expected_resource = AccountState::try_from(blob).unwrap();
let mut batch = JsonRpcBatch::default();
batch.add_get_account_request(*first_account);
let result = execute_batch_and_get_first_response(&client, &mut runtime, batch);
let account = AccountView::optional_from_response(result)
.unwrap()
.expect("account does not exist");
let account_balances: Vec<_> = account.balances.iter().map(|bal| bal.amount).collect();
let expected_resource_balances: Vec<_> = expected_resource
.get_balance_resources(&[from_currency_code_string(LBR_NAME).unwrap()])
.unwrap()
.iter()
.map(|(_, bal_resource)| bal_resource.coin())
.collect();
assert_eq!(account_balances, expected_resource_balances);
assert_eq!(
account.sequence_number,
expected_resource
.get_account_resource()
.unwrap()
.unwrap()
.sequence_number()
);
// test case 2: batch call
let mut batch = JsonRpcBatch::default();
let mut states = vec![];
for (account, blob) in mock_db.all_accounts.iter() {
if account == first_account {
continue;
}
states.push(AccountState::try_from(blob).unwrap());
batch.add_get_account_request(*account);
}
let responses = runtime.block_on(client.execute(batch)).unwrap();
assert_eq!(responses.len(), states.len());
for (idx, response) in responses.into_iter().enumerate() {
let account = AccountView::optional_from_response(response.expect("error in response"))
.unwrap()
.expect("account does not exist");
let account_balances: Vec<_> = account.balances.iter().map(|bal| bal.amount).collect();
let expected_resource_balances: Vec<_> = states[idx]
.get_balance_resources(&[from_currency_code_string(LBR_NAME).unwrap()])
.unwrap()
.iter()
.map(|(_, bal_resource)| bal_resource.coin())
.collect();
assert_eq!(account_balances, expected_resource_balances);
assert_eq!(
account.sequence_number,
states[idx]
.get_account_resource()
.unwrap()
.unwrap()
.sequence_number()
);
}
}
#[test]
fn test_get_metadata_latest() {
let (mock_db, client, mut runtime) = create_database_client_and_runtime();
let (actual_version, actual_timestamp) = mock_db.get_latest_commit_metadata().unwrap();
let mut batch = JsonRpcBatch::default();
batch.add_get_metadata_request(None);
let result = execute_batch_and_get_first_response(&client, &mut runtime, batch);
let result_view = BlockMetadata::from_response(result).unwrap();
assert_eq!(result_view.version, actual_version);
assert_eq!(result_view.timestamp, actual_timestamp);
}
#[test]
fn test_get_metadata() {
let (mock_db, client, mut runtime) = create_database_client_and_runtime();
let mut batch = JsonRpcBatch::default();
batch.add_get_metadata_request(Some(1));
let result = execute_batch_and_get_first_response(&client, &mut runtime, batch);
let result_view = BlockMetadata::from_response(result).unwrap();
assert_eq!(result_view.version, 1);
assert_eq!(result_view.timestamp, mock_db.timestamps[1]);
}
#[test]
fn test_limit_batch_size() {
let (_, client, mut runtime) = create_database_client_and_runtime();
let mut batch = JsonRpcBatch::default();
for i in 0..21 {
batch.add_get_metadata_request(Some(i));
}
let ret = runtime.block_on(client.execute(batch));
assert!(ret.is_err());
let expected = "JsonRpcError JsonRpcError { code: -32600, message: \"Invalid Request: batch size = 21, exceed limit 20\", data: None }";
assert_eq!(ret.unwrap_err().to_string(), expected)
}
#[test]
fn test_get_events_page_limit() {
let (_, client, mut runtime) = create_database_client_and_runtime();
let mut batch = JsonRpcBatch::default();
batch.add_get_events_request(
"13000000000000000000000000000000000000000a550c18".to_string(),
0,
1001,
);
let ret = runtime.block_on(client.execute(batch)).unwrap().remove(0);
assert!(ret.is_err());
let expected = "JsonRpcError { code: -32600, message: \"Invalid Request: page size = 1001, exceed limit 1000\", data: None }";
assert_eq!(ret.unwrap_err().to_string(), expected)
}
#[test]
fn test_get_transactions_page_limit() {
let (_, client, mut runtime) = create_database_client_and_runtime();
let mut batch = JsonRpcBatch::default();
batch.add_get_transactions_request(0, 1001, false);
let ret = runtime.block_on(client.execute(batch)).unwrap().remove(0);
assert!(ret.is_err());
let expected = "JsonRpcError { code: -32600, message: \"Invalid Request: page size = 1001, exceed limit 1000\", data: None }";
assert_eq!(ret.unwrap_err().to_string(), expected)
}
#[test]
fn test_get_events() {
let (mock_db, client, mut runtime) = create_database_client_and_runtime();
let event_index = 0;
let mock_db_events = mock_db.events;
let (first_event_version, first_event) = mock_db_events[event_index].clone();
let event_key = hex::encode(first_event.key().as_bytes());
let mut batch = JsonRpcBatch::default();
batch.add_get_events_request(
event_key,
first_event.sequence_number(),
first_event.sequence_number() + 10,
);
let result = execute_batch_and_get_first_response(&client, &mut runtime, batch);
let events = EventView::vec_from_response(result).unwrap();
let fetched_event = &events[event_index];
assert_eq!(
fetched_event.sequence_number,
first_event.sequence_number(),
"Seq number wrong"
);
assert_eq!(
fetched_event.transaction_version, first_event_version,
"Tx version wrong"
);
}
#[test]
fn test_get_transactions() {
let (mock_db, client, mut runtime) = create_database_client_and_runtime();
let version = mock_db.get_latest_version().unwrap();
let page = 800usize;
for base_version in (0..version)
.map(u64::from)
.take(page)
.collect::<Vec<_>>()
.into_iter()
{
let mut batch = JsonRpcBatch::default();
batch.add_get_transactions_request(base_version, page as u64, true);
let result = execute_batch_and_get_first_response(&client, &mut runtime, batch);
let txns = TransactionView::vec_from_response(result).unwrap();
for (i, view) in txns.iter().enumerate() {
let version = base_version + i as u64;
assert_eq!(view.version, version);
let (tx, status) = &mock_db.all_txns[version as usize];
assert_eq!(view.hash, tx.hash().to_hex());
// Check we returned correct events
let expected_events = mock_db
.events
.iter()
.filter(|(v, _)| *v == view.version)
.map(|(_, e)| e)
.collect::<Vec<_>>();
assert_eq!(expected_events.len(), view.events.len());
assert_eq!(VMStatusView::from(status), view.vm_status);
for (i, event_view) in view.events.iter().enumerate() {
let expected_event = expected_events.get(i).expect("expected event not found");
assert_eq!(event_view.sequence_number, expected_event.sequence_number());
assert_eq!(event_view.transaction_version, version);
assert_eq!(
event_view.key.0,
BytesView::from(expected_event.key().as_bytes()).0
);
// TODO: check event_data
}
match tx {
Transaction::BlockMetadata(t) => match view.transaction {
TransactionDataView::BlockMetadata { timestamp_usecs } => {
assert_eq!(t.clone().into_inner().unwrap().1, timestamp_usecs);
}
_ => panic!("Returned value doesn't match!"),
},
Transaction::GenesisTransaction(_) => match view.transaction {
TransactionDataView::WriteSet { .. } => {}
_ => panic!("Returned value doesn't match!"),
},
Transaction::UserTransaction(t) => match &view.transaction {
TransactionDataView::UserTransaction {
sender,
script_hash,
chain_id,
..
} => {
assert_eq!(&t.sender().to_string(), sender);
assert_eq!(&t.chain_id().id(), chain_id);
// TODO: verify every field
if let TransactionPayload::Script(s) = t.payload() {
assert_eq!(script_hash, &HashValue::sha3_256_of(s.code()).to_hex());
}
}
_ => panic!("Returned value doesn't match!"),
},
}
}
}
}
#[test]
fn test_get_account_transaction() {
let (mock_db, client, mut runtime) = create_database_client_and_runtime();
for (acc, blob) in mock_db.all_accounts.iter() {
let ar = AccountResource::try_from(blob).unwrap();
for seq in 1..ar.sequence_number() {
let mut batch = JsonRpcBatch::default();
batch.add_get_account_transaction_request(*acc, seq, true);
let result = execute_batch_and_get_first_response(&client, &mut runtime, batch);
let tx_view = TransactionView::optional_from_response(result)
.unwrap()
.expect("Transaction didn't exists!");
let (expected_tx, expected_status) = mock_db
.all_txns
.iter()
.find_map(|(t, status)| {
if let Ok(x) = t.as_signed_user_txn() {
if x.sender() == *acc && x.sequence_number() == seq {
assert_eq!(tx_view.hash, t.hash().to_hex());
return Some((x, status));
}
}
None
})
.expect("Couldn't find tx");
// Check we returned correct events
let expected_events = mock_db
.events
.iter()
.filter(|(ev, _)| *ev == tx_view.version)
.map(|(_, e)| e)
.collect::<Vec<_>>();
assert_eq!(tx_view.events.len(), expected_events.len());
// check VM status
assert_eq!(tx_view.vm_status, VMStatusView::from(expected_status));
for (i, event_view) in tx_view.events.iter().enumerate() {
let expected_event = expected_events.get(i).expect("expected event not found");
assert_eq!(event_view.sequence_number, expected_event.sequence_number());
assert_eq!(event_view.transaction_version, tx_view.version);
assert_eq!(
event_view.key.0,
BytesView::from(expected_event.key().as_bytes()).0
);
// TODO: check event_data
}
let tx_data_view = tx_view.transaction;
// Always user transaction
match tx_data_view {
TransactionDataView::UserTransaction {
sender,
sequence_number,
script_hash,
..
} => {
assert_eq!(acc.to_string(), sender);
assert_eq!(seq, sequence_number);
if let TransactionPayload::Script(s) = expected_tx.payload() {
assert_eq!(script_hash, HashValue::sha3_256_of(s.code()).to_hex());
}
}
_ => panic!("wrong type"),
}
}
}
}
#[test]
fn test_get_account_transactions() {
let (mock_db, client, mut runtime) = create_database_client_and_runtime();
for (acc, blob) in mock_db.all_accounts.iter() {
let total = AccountResource::try_from(blob).unwrap().sequence_number();
let mut batch = JsonRpcBatch::default();
batch.add_get_account_transactions_request(*acc, 0, max(1, min(1000, total * 2)), true);
let result = execute_batch_and_get_first_response(&client, &mut runtime, batch);
let tx_views = TransactionView::vec_from_response(result).unwrap();
assert_eq!(tx_views.len() as u64, total);
}
}
#[test]
// Check that if version and ledger_version parameters are None, then the server returns the latest
// known state.
fn test_get_account_state_with_proof_null_versions() {
let (mock_db, client, mut runtime) = create_database_client_and_runtime();
let account = get_first_account_from_mock_db(&mock_db);
let mut batch = JsonRpcBatch::default();
batch.add_get_account_state_with_proof_request(account, None, None);
let result = execute_batch_and_get_first_response(&client, &mut runtime, batch);
let received_proof = AccountStateWithProofView::from_response(result).unwrap();
let expected_proof = get_first_state_proof_from_mock_db(&mock_db);
// Check latest version returned, when no version specified
assert_eq!(received_proof.version, expected_proof.version);
}
#[test]
fn test_get_account_state_with_proof() {
let (mock_db, client, mut runtime) = create_database_client_and_runtime();
let account = get_first_account_from_mock_db(&mock_db);
let mut batch = JsonRpcBatch::default();
batch.add_get_account_state_with_proof_request(account, Some(0), Some(0));
let result = execute_batch_and_get_first_response(&client, &mut runtime, batch);
let received_proof = AccountStateWithProofView::from_response(result).unwrap();
let expected_proof = get_first_state_proof_from_mock_db(&mock_db);
let expected_blob = expected_proof.blob.as_ref().unwrap();
let expected_sm_proof = expected_proof.proof.transaction_info_to_account_proof();
let expected_txn_info_with_proof = expected_proof.proof.transaction_info_with_proof();
//version
assert_eq!(received_proof.version, expected_proof.version);
// blob
let account_blob: AccountStateBlob =
lcs::from_bytes(&received_proof.blob.unwrap().into_bytes().unwrap()).unwrap();
assert_eq!(account_blob, *expected_blob);
// proof
let sm_proof: SparseMerkleProof = lcs::from_bytes(
&received_proof
.proof
.transaction_info_to_account_proof
.into_bytes()
.unwrap(),
)
.unwrap();
assert_eq!(sm_proof, *expected_sm_proof);
let txn_info: TransactionInfo =
lcs::from_bytes(&received_proof.proof.transaction_info.into_bytes().unwrap()).unwrap();
let li_proof: TransactionAccumulatorProof = lcs::from_bytes(
&received_proof
.proof
.ledger_info_to_transaction_info_proof
.into_bytes()
.unwrap(),
)
.unwrap();
let txn_info_with_proof = TransactionInfoWithProof::new(li_proof, txn_info);
assert_eq!(txn_info_with_proof, *expected_txn_info_with_proof);
}
#[test]
fn test_get_state_proof() {
let (mock_db, client, mut runtime) = create_database_client_and_runtime();
let version = mock_db.version;
let mut batch = JsonRpcBatch::default();
batch.add_get_state_proof_request(version);
let result = execute_batch_and_get_first_response(&client, &mut runtime, batch);
let proof = StateProofView::from_response(result).unwrap();
let li: LedgerInfoWithSignatures =
lcs::from_bytes(&proof.ledger_info_with_signatures.into_bytes().unwrap()).unwrap();
assert_eq!(li.ledger_info().version(), version);
}
#[test]
fn test_get_network_status() {
let (_mock_db, client, mut runtime) = create_database_client_and_runtime();
let mut batch = JsonRpcBatch::default();
batch.add_get_network_status_request();
if let JsonRpcResponse::NetworkStatusResponse(connected_peers) =
execute_batch_and_get_first_response(&client, &mut runtime, batch)
{
// expect no connected peers when no network is running
assert_eq!(connected_peers.as_u64().unwrap(), 0);
} else {
panic!("did not receive expected json rpc response");
}
}
/// Creates and returns a MockLibraDB, a JsonRpcAsyncClient and the corresponding server Runtime
/// for testing. The underlying mempool client sender channel is created with a buffer size of 1.
fn create_database_client_and_runtime() -> (MockLibraDB, JsonRpcAsyncClient, Runtime) {
let (mock_db, runtime, url, _) = create_db_and_runtime();
let client =
JsonRpcAsyncClient::new(reqwest::Url::from_str(url.as_str()).expect("invalid url"));
(mock_db, client, runtime)
}
fn create_db_and_runtime() -> (
MockLibraDB,
Runtime,
String,
Receiver<(
SignedTransaction,
oneshot::Sender<anyhow::Result<SubmissionStatus>>,
)>,
) {
let mock_db = mock_db();
let host = "0.0.0.0";
let port = utils::get_available_port();
let address = format!("{}:{}", host, port);
let (mp_sender, mp_events) = channel(1);
let runtime = test_bootstrap(
address.parse().unwrap(),
Arc::new(mock_db.clone()),
mp_sender,
);
(mock_db, runtime, format!("http://{}", address), mp_events)
}
/// Returns the first account address stored in the given mock database.
fn get_first_account_from_mock_db(mock_db: &MockLibraDB) -> AccountAddress {
*mock_db
.all_accounts
.keys()
.next()
.expect("mock DB missing account")
}
/// Returns the first account_state_with_proof stored in the given mock database.
fn get_first_state_proof_from_mock_db(mock_db: &MockLibraDB) -> AccountStateWithProof {
mock_db
.account_state_with_proof
.get(0)
.expect("mock DB missing account state with proof")
.clone()
}
/// Executes the given JsonRPCBatch using the specified JsonRpcAsyncClient and Runtime, and returns
/// the first JsonRpcResponse produced for the batch.
fn execute_batch_and_get_first_response(
client: &JsonRpcAsyncClient,
runtime: &mut Runtime,
batch: JsonRpcBatch,
) -> JsonRpcResponse {
runtime
.block_on(client.execute(batch))
.unwrap()
.remove(0)
.unwrap()
} | |
webrtc_duet.py | """
PySyft Duet (WebRTC)
This class aims to implement the PySyft Duet concept by using WebRTC protocol as a
connection channel in order to allow two different users to establish a direct
connection with high-quality Real-time Communication using private addresses.
The most common example showing how it can be used is the notebook demo example:
Two different jupyter / collab notebooks in different machines using private addresses
behind routers, proxies and firewalls can connect using a full-duplex channel
to perform machine learning and data science tasks, working as a client
and server at the same time.
PS 1: You need a signaling server running somewhere.
If you don't know any public address running this service, or want to set up your own
signaling network you can use PyGrid's network app.
For local development you can run:
$ python src/syft/grid/example_nodes/network.py
PS 2: The PyGrid repo has a complementary branch that matches the current PySyft release.
To use this feature you must use the correct PyGrid branch.
(https://github.com/OpenMined/PyGrid/)
You can get more details about this whole process in the syft/grid/connections/webrtc.py
source code.
"""
# stdlib
import asyncio
from typing import Optional
# third party
from nacl.signing import SigningKey
# relative
from ... import serialize
from ...core.io.route import SoloRoute
from ...core.node.common.metadata import Metadata
from ...core.node.domain.client import DomainClient
from ...core.node.domain.domain import Domain
from ...logger import error
from ...logger import traceback_and_raise
from ..connections.webrtc import WebRTCConnection
from ..services.signaling_service import AnswerPullRequestMessage
from ..services.signaling_service import InvalidLoopBackRequest
from ..services.signaling_service import OfferPullRequestMessage
from ..services.signaling_service import SignalingAnswerMessage
from ..services.signaling_service import SignalingOfferMessage
from .signaling_client import SignalingClient
class Duet(DomainClient):
def __init__(
self,
node: Domain,
target_id: str,
signaling_client: SignalingClient,
offer: bool = True,
):
# Generate a signing key
self.signing_key = SigningKey.generate()
self.verify_key = self.signing_key.verify_key
# Async Queues | self._pull_msg_queue: asyncio.Queue = asyncio.Queue()
# Since we need to inject a node instance into a bidirectional
# connection so that it can work as a client and server over the
# same channel, we must be careful about forwarding node instance
# references, to avoid multiple references to the same object
# (which makes garbage collection difficult).
# A good way to avoid this problem is to forward only weak
# references. These references work like a proxy, not creating a
# strong reference to the object. So, if we delete the real object
# instance, the garbage collector can call the __del__ method
# without problems.
self.node = node
# WebRTCConnection instance ( Bidirectional Connection )
self.connection = WebRTCConnection(node=self.node)
# Client used to exchange signaling messages in order to establish a connection
# NOTE: In the future it may be a good idea to modularize this client to make
# it pluggable using different connection protocols.
self.signaling_client = signaling_client
# If this peer will not start the signaling process
if not offer:
# Start by adding an OfferPullRequest in order to verify whether
# the desired address pushed an offer request to connect with you.
# This will trigger the pull async task to check signaling notifications.
self._pull_msg_queue.put_nowait(
OfferPullRequestMessage(
address=self.signaling_client.address,
target_peer=target_id,
host_peer=self.signaling_client.duet_id,
reply_to=self.signaling_client.address,
)
)
else:
# Push a WebRTC offer request to the address.
self.send_offer(target_id=target_id)
# This flag is used to finish the signaling process gracefully.
# While self._available is True, the pull/push tasks will keep running.
# The flag is set to False when:
# 1 - The signaling process ends (checked by _update_availability()).
# 2 - Any exception is raised during these tasks.
self._available = True
# This attribute is set during the signaling message exchange,
# and used to create a SoloRoute for both sides.
self._client_metadata: Optional[Metadata] = None
# Start async tasks and wait until one of them finishes.
# As mentioned before, these tasks can be finished by two reasons:
# 1 - Signaling process ends
# 2 - Unexpected Exception
try:
asyncio.run(self.notify())
# If client_metadata != None, then the connection was created successfully.
if self._client_metadata is not None:
# Deserialize client's metadata in order to obtain
# PySyft's location structure
( # type: ignore
spec_location,
name,
_,
) = DomainClient.deserialize_client_metadata_from_node(
metadata=serialize(self._client_metadata)
)
# Create a SoloRoute
route = SoloRoute(destination=spec_location, connection=self.connection)
# Initialize the super class
super().__init__(
domain=spec_location,
name=name,
routes=[route],
signing_key=self.signing_key,
verify_key=self.verify_key,
)
self.connection._client_address = self.address
# If client_metadata is None, then an exception occurred during the process.
# The exception has been caught and saved in self._exception.
else:
# NOTE: Maybe we should create a custom exception type.
traceback_and_raise(
Exception(
f"Something went wrong during the Duet init process. {self._exception}"
)
)
except Exception as e:
traceback_and_raise(e)
async def notify(self) -> None:
try:
# Enqueue Pull/Push async tasks
push_task = asyncio.ensure_future(self.push())
pull_task = asyncio.ensure_future(self.pull())
# Wait until one of them finishes
done, pending = await asyncio.wait(
[pull_task, push_task], return_when=asyncio.FIRST_COMPLETED
)
# Finish the pending one.
for task in pending:
task.cancel()
except Exception as e:
traceback_and_raise(e)
def close(self) -> None:
self.connection.close()
async def push(self) -> None:
# This task is responsible for pushing offer/answer messages.
try:
while self._available:
# If push_msg_queue is empty,
# give up task queue priority, giving
# computing time to the next task.
msg = await self._push_msg_queue.get()
# If self.push_msg_queue.get() returned a message (SignalingOfferMessage,SignalingAnswerMessage)
# send it to the signaling server.
self.signaling_client.send_immediate_msg_without_reply(msg=msg)
except Exception as e:
log = f"Got an exception in Duet push. {e}"
error(log)
# If any exception raises, set the self._available flag to False
# in order to finish gracefully all the async tasks and save the exception.
self._available = False
self._exception: Exception = e
async def pull(self) -> None:
try:
while self._available:
# If pull_msg_queue is empty,
# give up task queue priority, giving
# computing time to the next task.
msg = await self._pull_msg_queue.get()
# If self.pull_msg_queue.get() returned a message (OfferPullRequestMessage,AnswerPullRequestMessage)
# send it to the signaling server.
_response = self.signaling_client.send_immediate_msg_with_reply(msg=msg)
# If Signaling Offer Message was found
if isinstance(_response, SignalingOfferMessage):
await self._send_answer(msg=_response)
# If Signaling Answer Message was found
elif isinstance(_response, SignalingAnswerMessage):
await self._ack(msg=_response)
# If it was an invalid loopback request
elif isinstance(_response, InvalidLoopBackRequest):
traceback_and_raise(
Exception(
"You can't perform p2p connection using your current node address as a destination peer."
)
)
# If no signaling message was found
else:
# Just enqueue the request to be processed later.
self._pull_msg_queue.put_nowait(msg)
# Checks if the signaling process is over.
self._available = self._update_availability()
await asyncio.sleep(0.5)
except Exception as e:
log = f"Got an exception in Duet pull. {e}"
error(log)
# If any exception raises, set the self._available flag to False
# in order to finish gracefully all the async tasks and save the exception.
self._available = False
self._exception = e
def send_offer(self, target_id: str) -> None:
"""Starts a new signaling process by creating a new
offer message and pushing it to the Signaling Server."""
try:
# Generates an offer request payload containing
# local network description data/metadata (IP, MAC, Mask, etc...)
payload = asyncio.run(self.connection._set_offer())
# Creates a PySyft's SignalingOfferMessage
signaling_offer = SignalingOfferMessage(
address=self.signaling_client.address, # Target's address
payload=payload, # Offer Payload
host_metadata=self.node.get_metadata_for_client(), # Own Node Metadata
target_peer=target_id,
host_peer=self.signaling_client.duet_id, # Own Node ID
)
# Enqueue it in push msg queue to be sent to the signaling server.
self._push_msg_queue.put_nowait(signaling_offer)
# Create/enqueue a new AnswerPullRequest in order to wait for signaling response.
self._pull_msg_queue.put_nowait(
AnswerPullRequestMessage(
address=self.signaling_client.address,
target_peer=target_id,
host_peer=self.signaling_client.duet_id,
reply_to=self.signaling_client.address,
)
)
except Exception as e:
traceback_and_raise(e)
async def _send_answer(self, msg: SignalingOfferMessage) -> None:
"""Process SignalingOfferMessage and create a new
SignalingAnswerMessage as a response"""
try:
# Process received offer message updating target's remote address
# Generates an answer request payload containing
# local network description data/metadata (IP, MAC, Mask, etc...)
payload = asyncio.run(self.connection._set_answer(payload=msg.payload))
# Save remote node's metadata in order to create a SoloRoute.
self._client_metadata = msg.host_metadata
# Create a new SignalingAnswerMessage
signaling_answer = SignalingAnswerMessage(
address=self.signaling_client.address,
payload=payload, # Signaling answer payload
host_metadata=self.node.get_metadata_for_client(), # Own Node Metadata
target_peer=msg.host_peer, # Remote Node ID
host_peer=self.signaling_client.duet_id,
)
# Enqueue it in the push msg queue to be sent to the signaling server.
await self._push_msg_queue.put(signaling_answer)
except Exception as e:
traceback_and_raise(e)
async def _ack(self, msg: SignalingAnswerMessage) -> None:
"""Last signaling message, stores remote Node
metadata and updates target's remote address"""
try:
# Save remote node's metadata in order to create a SoloRoute.
self._client_metadata = msg.host_metadata
# Process received offer message updating target's remote address
await self.connection._process_answer(payload=msg.payload)
except Exception as e:
traceback_and_raise(e)
def _update_availability(self) -> bool:
"""Method used to check if the signaling process is over.
:return: Boolean flag, True if it's NOT over, and False if it's over.
:rtype: Boolean
"""
available = False
try:
available = (
not self._pull_msg_queue.empty()
and self.connection.peer_connection is not None
)
except Exception as e:
traceback_and_raise(e)
return available | # These queues will be used in order to enqueue/dequeue
# messages to be sent to the signaling server.
self._push_msg_queue: asyncio.Queue = asyncio.Queue() |
deleted_web_apps_operations.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
import uuid
from .. import models
class DeletedWebAppsOperations(object):
"""DeletedWebAppsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: API Version. Constant value: "2016-03-01".
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2016-03-01"
self.config = config
def list(
self, custom_headers=None, raw=False, **operation_config):
"""Get all deleted apps for a subscription.
Get all deleted apps for a subscription.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`DeletedSitePaged
<azure.mgmt.web.models.DeletedSitePaged>`
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/providers/Microsoft.Web/deletedSites'
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(
request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.DeletedSitePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.DeletedSitePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
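# Illustrative usage sketch (an assumption, not generated code): this
# operations group is typically reached through the service client;
# the names below should be verified against the installed SDK version.
#
#   from azure.mgmt.web import WebSiteManagementClient
#   client = WebSiteManagementClient(credentials, subscription_id)
#   for deleted_site in client.deleted_web_apps.list():
#       print(deleted_site)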
| def list_by_resource_group(
self, resource_group_name, custom_headers=None, raw=False, **operation_config):
"""Gets deleted web apps in subscription.
Gets deleted web apps in subscription.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`DeletedSitePaged
<azure.mgmt.web.models.DeletedSitePaged>`
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/deletedSites'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern='^[-\w\._\(\)]+[^\.]$'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.DeletedSitePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.DeletedSitePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized | |
log.go | // Copyright 2018 The go-juchain Authors
// This file is part of the go-juchain library.
//
// The go-juchain library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-juchain library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-juchain library. If not, see <http://www.gnu.org/licenses/>.
package metrics
import (
"time"
)
type Logger interface {
Printf(format string, v ...interface{})
}
func Log(r Registry, freq time.Duration, l Logger) {
LogScaled(r, freq, time.Nanosecond, l)
}
// Output each metric in the given registry periodically using the given
// logger. Print timings in `scale` units (e.g. time.Millisecond) rather than nanos.
func | (r Registry, freq time.Duration, scale time.Duration, l Logger) {
du := float64(scale)
duSuffix := scale.String()[1:]
for range time.Tick(freq) {
r.Each(func(name string, i interface{}) {
switch metric := i.(type) {
case Counter:
l.Printf("counter %s\n", name)
l.Printf(" count: %9d\n", metric.Count())
case Gauge:
l.Printf("gauge %s\n", name)
l.Printf(" value: %9d\n", metric.Value())
case GaugeFloat64:
l.Printf("gauge %s\n", name)
l.Printf(" value: %f\n", metric.Value())
case Healthcheck:
metric.Check()
l.Printf("healthcheck %s\n", name)
l.Printf(" error: %v\n", metric.Error())
case Histogram:
h := metric.Snapshot()
ps := h.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999})
l.Printf("histogram %s\n", name)
l.Printf(" count: %9d\n", h.Count())
l.Printf(" min: %9d\n", h.Min())
l.Printf(" max: %9d\n", h.Max())
l.Printf(" mean: %12.2f\n", h.Mean())
l.Printf(" stddev: %12.2f\n", h.StdDev())
l.Printf(" median: %12.2f\n", ps[0])
l.Printf(" 75%%: %12.2f\n", ps[1])
l.Printf(" 95%%: %12.2f\n", ps[2])
l.Printf(" 99%%: %12.2f\n", ps[3])
l.Printf(" 99.9%%: %12.2f\n", ps[4])
case Meter:
m := metric.Snapshot()
l.Printf("meter %s\n", name)
l.Printf(" count: %9d\n", m.Count())
l.Printf(" 1-min rate: %12.2f\n", m.Rate1())
l.Printf(" 5-min rate: %12.2f\n", m.Rate5())
l.Printf(" 15-min rate: %12.2f\n", m.Rate15())
l.Printf(" mean rate: %12.2f\n", m.RateMean())
case Timer:
t := metric.Snapshot()
ps := t.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999})
l.Printf("timer %s\n", name)
l.Printf(" count: %9d\n", t.Count())
l.Printf(" min: %12.2f%s\n", float64(t.Min())/du, duSuffix)
l.Printf(" max: %12.2f%s\n", float64(t.Max())/du, duSuffix)
l.Printf(" mean: %12.2f%s\n", t.Mean()/du, duSuffix)
l.Printf(" stddev: %12.2f%s\n", t.StdDev()/du, duSuffix)
l.Printf(" median: %12.2f%s\n", ps[0]/du, duSuffix)
l.Printf(" 75%%: %12.2f%s\n", ps[1]/du, duSuffix)
l.Printf(" 95%%: %12.2f%s\n", ps[2]/du, duSuffix)
l.Printf(" 99%%: %12.2f%s\n", ps[3]/du, duSuffix)
l.Printf(" 99.9%%: %12.2f%s\n", ps[4]/du, duSuffix)
l.Printf(" 1-min rate: %12.2f\n", t.Rate1())
l.Printf(" 5-min rate: %12.2f\n", t.Rate5())
l.Printf(" 15-min rate: %12.2f\n", t.Rate15())
l.Printf(" mean rate: %12.2f\n", t.RateMean())
}
})
}
}
| LogScaled |
_conversion.py | # noqa: D100
from typing import Optional, Tuple
import numpy as np
import xarray as xr
from xclim.core.calendar import date_range, datetime_to_decimal_year
from xclim.core.units import amount2rate, convert_units_to, declare_units, units2pint
__all__ = [
"humidex",
"tas",
"uas_vas_2_sfcwind",
"sfcwind_2_uas_vas",
"saturation_vapor_pressure",
"relative_humidity",
"specific_humidity",
"snowfall_approximation",
"rain_approximation",
"wind_chill_index",
"clausius_clapeyron_scaled_precipitation",
"potential_evapotranspiration",
]
@declare_units(tas="[temperature]", tdps="[temperature]", hurs="[]")
def humidex(
tas: xr.DataArray,
tdps: Optional[xr.DataArray] = None,
hurs: Optional[xr.DataArray] = None,
) -> xr.DataArray:
r"""Humidex index.
The humidex indicates how hot the air feels to an average person, accounting for the effect of humidity. It
can be loosely interpreted as the equivalent perceived temperature when the air is dry.
Parameters
----------
tas : xarray.DataArray
Air temperature.
tdps : xarray.DataArray, optional
Dewpoint temperature.
hurs : xarray.DataArray, optional
Relative humidity.
Returns
-------
xarray.DataArray, [temperature]
The humidex index.
Notes
-----
The humidex is usually computed using hourly observations of dry bulb and dewpoint temperatures. It is computed
using the formula based on [masterton79]_:
.. math::
T + {\frac {5}{9}}\left[e - 10\right]
where :math:`T` is the dry bulb air temperature (°C). The term :math:`e` can be computed from the dewpoint
temperature :math:`T_{dewpoint}` in K:
.. math::
e = 6.112 \times \exp\left(5417.7530\left({\frac {1}{273.16}}-{\frac {1}{T_{\text{dewpoint}}}}\right)\right)
where the constant 5417.753 reflects the molecular weight of water, latent heat of vaporization,
and the universal gas constant ([mekis15]_). Alternatively, the term :math:`e` can also be computed from
the relative humidity `h` expressed in percent using [sirangelo20]_:
.. math::
e = \frac{h}{100} \times 6.112 \times 10^{7.5 T/(T + 237.7)}.
The humidex *comfort scale* ([eccc]_) can be interpreted as follows:
- 20 to 29 : no discomfort;
- 30 to 39 : some discomfort;
- 40 to 45 : great discomfort, avoid exertion;
- 46 and over : dangerous, possible heat stroke.
References
----------
.. [masterton79] Masterton, J. M., & Richardson, F. A. (1979). HUMIDEX, A method of quantifying human discomfort due to excessive heat and humidity, CLI 1-79. Downsview, Ontario: Environment Canada, Atmospheric Environment Service.
.. [mekis15] Éva Mekis, Lucie A. Vincent, Mark W. Shephard & Xuebin Zhang (2015) Observed Trends in Severe Weather Conditions Based on Humidex, Wind Chill, and Heavy Rainfall Events in Canada for 1953–2012, Atmosphere-Ocean, 53:4, 383-397, DOI: 10.1080/07055900.2015.1086970
.. [sirangelo20] Sirangelo, B., Caloiero, T., Coscarelli, R. et al. Combining stochastic models of air temperature and vapour pressure for the analysis of the bioclimatic comfort through the Humidex. Sci Rep 10, 11395 (2020). https://doi.org/10.1038/s41598-020-68297-4
.. [eccc] https://climate.weather.gc.ca/glossary_e.html
"""
if (tdps is None) == (hurs is None):
    raise ValueError("Exactly one of `tdps` or `hurs` must be given.")
# Vapour pressure in hPa
if tdps is not None:
# Convert dewpoint temperature to Kelvins
tdps = convert_units_to(tdps, "kelvin")
e = 6.112 * np.exp(5417.7530 * (1 / 273.16 - 1.0 / tdps))
elif hurs is not None:
# Convert dry bulb temperature to Celsius
tasC = convert_units_to(tas, "celsius")
e = hurs / 100 * 6.112 * 10 ** (7.5 * tasC / (tasC + 237.7))
# Temperature delta due to humidity in delta_degC
h = 5 / 9 * (e - 10)
h.attrs["units"] = "delta_degree_Celsius"
# Get delta_units for output
du = (1 * units2pint(tas) - 0 * units2pint(tas)).units
h = convert_units_to(h, du)
# Add the delta to the input temperature
out = h + tas
out.attrs["units"] = tas.units
return out
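# --- Illustrative usage sketch (not part of the original module); the helper
# name `_demo_humidex` and the synthetic values are assumptions.
def _demo_humidex():
    tas = xr.DataArray([30.0], dims="time", attrs={"units": "degC"})
    hurs = xr.DataArray([50.0], dims="time", attrs={"units": "%"})
    # At 30 degC and 50 % relative humidity, e ~= 21.2 hPa, so the humidex
    # is roughly 30 + 5/9 * (21.2 - 10) ~= 36 degC.
    return humidex(tas, hurs=hurs)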
@declare_units(tasmin="[temperature]", tasmax="[temperature]")
def tas(tasmin: xr.DataArray, tasmax: xr.DataArray) -> xr.DataArray:
"""Average temperature from minimum and maximum temperatures.
We assume a symmetrical distribution for the temperature and retrieve the average value as Tg = (Tx + Tn) / 2
Parameters
----------
tasmin : xarray.DataArray
Minimum (daily) temperature
tasmax : xarray.DataArray
Maximum (daily) temperature
Returns
-------
xarray.DataArray
Mean (daily) temperature [same units as tasmin]
"""
tasmax = convert_units_to(tasmax, tasmin)
tas = (tasmax + tasmin) / 2
tas.attrs["units"] = tasmin.attrs["units"]
return tas
@declare_units(uas="[speed]", vas="[speed]", calm_wind_thresh="[speed]")
def uas_vas_2_sfcwind(
uas: xr.DataArray, vas: xr.DataArray, calm_wind_thresh: str = "0.5 m/s"
) -> Tuple[xr.DataArray, xr.DataArray]:
"""Wind speed and direction from the eastward and northward wind components.
Computes the magnitude and angle of the wind vector from its northward and eastward components,
following the meteorological convention that sets calm wind to a direction of 0° and northerly wind to 360°.
Parameters
----------
uas : xr.DataArray
Eastward wind velocity
vas : xr.DataArray
Northward wind velocity
calm_wind_thresh : str
The threshold under which winds are considered "calm" and for which the direction
is set to 0. On the Beaufort scale, calm winds are defined as < 0.5 m/s.
Returns
-------
wind : xr.DataArray, [m s-1]
Wind velocity
wind_from_dir : xr.DataArray, [°]
Direction from which the wind blows, following the meteorological convention where
360 stands for North and 0 for calm winds.
Notes
-----
Winds with a velocity less than `calm_wind_thresh` are given a wind direction of 0°,
while stronger northerly winds are set to 360°.
"""
# Converts the wind speed to m s-1
uas = convert_units_to(uas, "m/s")
vas = convert_units_to(vas, "m/s")
wind_thresh = convert_units_to(calm_wind_thresh, "m/s")
# Wind speed is the hypotenuse of "uas" and "vas"
wind = np.hypot(uas, vas)
wind.attrs["units"] = "m s-1"
# Calculate the angle
wind_from_dir_math = np.degrees(np.arctan2(vas, uas))
# Convert the angle from the mathematical standard to the meteorological standard
wind_from_dir = (270 - wind_from_dir_math) % 360.0
# According to the meteorological standard, calm winds must have a direction of 0°
# while northerly winds have a direction of 360°
# On the Beaufort scale, calm winds are defined as < 0.5 m/s
wind_from_dir = xr.where(wind_from_dir.round() == 0, 360, wind_from_dir)
wind_from_dir = xr.where(wind < wind_thresh, 0, wind_from_dir)
wind_from_dir.attrs["units"] = "degree"
return wind, wind_from_dir
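# --- Illustrative sketch (assumed synthetic winds, hypothetical helper name):
# a pure westerly (uas > 0, vas = 0) blows *from* the west, i.e. a direction of
# 270 degrees, while a pure northerly (vas < 0) maps to 360 degrees.
def _demo_uas_vas_2_sfcwind():
    uas = xr.DataArray([10.0, 0.0], dims="time", attrs={"units": "m/s"})
    vas = xr.DataArray([0.0, -10.0], dims="time", attrs={"units": "m/s"})
    return uas_vas_2_sfcwind(uas, vas)  # speeds [10, 10] m/s, directions [270, 360] degrees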
@declare_units(sfcWind="[speed]", sfcWindfromdir="[]")
def sfcwind_2_uas_vas(
sfcWind: xr.DataArray, sfcWindfromdir: xr.DataArray # noqa
) -> Tuple[xr.DataArray, xr.DataArray]:
"""Eastward and northward wind components from the wind speed and direction.
Compute the eastward and northward wind components from the wind speed and direction.
Parameters
----------
sfcWind : xr.DataArray
Wind velocity
sfcWindfromdir : xr.DataArray
Direction from which the wind blows, following the meteorological convention
where 360 stands for North.
Returns
-------
uas : xr.DataArray, [m s-1]
Eastward wind velocity.
vas : xr.DataArray, [m s-1]
Northward wind velocity.
"""
# Converts the wind speed to m s-1
sfcWind = convert_units_to(sfcWind, "m/s") # noqa
# Converts the wind direction from the meteorological standard to the mathematical standard
wind_from_dir_math = (-sfcWindfromdir + 270) % 360.0
# TODO: This commented part should allow us to resample subdaily wind, but needs to be cleaned up and put elsewhere.
# if resample is not None:
# wind = wind.resample(time=resample).mean(dim='time', keep_attrs=True)
#
# # nb_per_day is the number of values each day. This should be calculated
# wind_from_dir_math_per_day = wind_from_dir_math.reshape((len(wind.time), nb_per_day))
# # Averages the subdaily angles around a circle, i.e. mean([0, 360]) = 0, not 180
# wind_from_dir_math = np.concatenate([[degrees(phase(sum(rect(1, radians(d)) for d in angles) / len(angles)))]
# for angles in wind_from_dir_math_per_day])
uas = sfcWind * np.cos(np.radians(wind_from_dir_math))
vas = sfcWind * np.sin(np.radians(wind_from_dir_math))
uas.attrs["units"] = "m s-1"
vas.attrs["units"] = "m s-1"
return uas, vas
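# --- Round-trip sketch (synthetic data, hypothetical helper name): converting
# the components to speed/direction and back should recover them to within
# floating-point error.
def _demo_sfcwind_round_trip():
    uas = xr.DataArray([3.0], dims="time", attrs={"units": "m/s"})
    vas = xr.DataArray([4.0], dims="time", attrs={"units": "m/s"})
    wind, wind_from_dir = uas_vas_2_sfcwind(uas, vas)
    uas2, vas2 = sfcwind_2_uas_vas(wind, wind_from_dir)
    np.testing.assert_allclose(uas2, uas, atol=1e-9)
    np.testing.assert_allclose(vas2, vas, atol=1e-9)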
@declare_units(tas="[temperature]", ice_thresh="[temperature]")
def saturation_vapor_pressure(
tas: xr.DataArray, ice_thresh: Optional[str] = None, method: str = "sonntag90"  # noqa
) -> xr.DataArray:
"""Saturation vapor pressure from temperature.
Parameters
----------
tas : xr.DataArray
Temperature array.
ice_thresh : str
Threshold temperature under which to switch to equations in reference to ice instead of water.
If None (default) everything is computed with reference to water.
method : {"dewpoint", "goffgratch46", "sonntag90", "tetens30", "wmo08"}
Which method to use, see notes.
Returns
-------
xarray.DataArray, [Pa]
Saturation vapor pressure.
Notes
-----
In all cases implemented here :math:`log(e_{sat})` is an empirically fitted function (usually a polynomial)
where coefficients can be different when ice is taken as reference instead of water. Available methods are:
- "goffgratch46" or "GG46", based on [goffgratch46]_, values and equation taken from [voemel]_.
- "sonntag90" or "SO90", taken from [sonntag90]_.
- "tetens30" or "TE30", based on [tetens30]_, values and equation taken from [voemel]_.
- "wmo08" or "WMO08", taken from [wmo08]_.
References
----------
.. [goffgratch46] Goff, J. A., and S. Gratch (1946) Low-pressure properties of water from -160 to 212 °F, in Transactions of the American Society of Heating and Ventilating Engineers, pp 95-122, presented at the 52nd annual meeting of the American Society of Heating and Ventilating Engineers, New York, 1946.
.. [sonntag90] Sonntag, D. (1990). Important new values of the physical constants of 1986, vapour pressure formulations based on the ITS-90, and psychrometer formulae. Zeitschrift für Meteorologie, 40(5), 340-344.
.. [tetens30] Tetens, O. 1930. Über einige meteorologische Begriffe. Z. Geophys 6: 207-309.
.. [voemel] https://cires1.colorado.edu/~voemel/vp.html
.. [wmo08] World Meteorological Organization. (2008). Guide to meteorological instruments and methods of observation. Geneva, Switzerland: World Meteorological Organization. https://www.weather.gov/media/epz/mesonet/CWOP-WMO8.pdf
"""
if ice_thresh is not None:
thresh = convert_units_to(ice_thresh, "degK")
else:
thresh = convert_units_to("0 K", "degK")
ref_is_water = tas > thresh
if method in ["sonntag90", "SO90"]:
e_sat = xr.where(
ref_is_water,
100
* np.exp( # Where ref_is_water is True, x100 is to convert hPa to Pa
-6096.9385 / tas # type: ignore
+ 16.635794
+ -2.711193e-2 * tas # type: ignore
+ 1.673952e-5 * tas ** 2
+ 2.433502 * np.log(tas) # numpy's log is ln
),
100
* np.exp( # Where ref_is_water is False (thus ref is ice)
-6024.5282 / tas # type: ignore
+ 24.7219
+ 1.0613868e-2 * tas # type: ignore
+ -1.3198825e-5 * tas ** 2
+ -0.49382577 * np.log(tas)
),
)
elif method in ["tetens30", "TE30"]:
e_sat = xr.where(
ref_is_water,
610.78 * np.exp(17.269388 * (tas - 273.16) / (tas - 35.86)),
610.78 * np.exp(21.8745584 * (tas - 273.16) / (tas - 7.66)),
)
elif method in ["goffgratch46", "GG46"]:
Tb = 373.16 # Water boiling temp [K]
eb = 101325 # e_sat at Tb [Pa]
Tp = 273.16 # Triple-point temperature [K]
ep = 611.73 # e_sat at Tp [Pa]
e_sat = xr.where(
ref_is_water,
eb
* 10
** (
-7.90298 * ((Tb / tas) - 1) # type: ignore
+ 5.02808 * np.log10(Tb / tas) # type: ignore
+ -1.3817e-7 * (10 ** (11.344 * (1 - tas / Tb)) - 1)
+ 8.1328e-3 * (10 ** (-3.49149 * ((Tb / tas) - 1)) - 1) # type: ignore
),
ep
* 10
** (
-9.09718 * ((Tp / tas) - 1) # type: ignore
+ -3.56654 * np.log10(Tp / tas) # type: ignore
+ 0.876793 * (1 - tas / Tp)
),
)
elif method in ["wmo08", "WMO08"]:
e_sat = xr.where(
ref_is_water,
611.2 * np.exp(17.62 * (tas - 273.16) / (tas - 30.04)),
611.2 * np.exp(22.46 * (tas - 273.16) / (tas - 0.54)),
)
else:
raise ValueError(
f"Method {method} is not in ['sonntag90', 'tetens30', 'goffgratch46', 'wmo08']"
)
e_sat.attrs["units"] = "Pa"
return e_sat
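# --- Quick cross-method check (illustrative, assumed values): at the triple
# point (273.16 K), every water-reference formula should land within a few
# pascals of ~611 Pa.
def _demo_saturation_vapor_pressure():
    tas = xr.DataArray([273.16], dims="time", attrs={"units": "K"})
    return {
        method: float(saturation_vapor_pressure(tas, method=method))
        for method in ("sonntag90", "tetens30", "goffgratch46", "wmo08")
    }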
@declare_units(
tas="[temperature]",
tdps="[temperature]",
huss="[]",
ps="[pressure]",
ice_thresh="[temperature]",
)
def relative_humidity(
tas: xr.DataArray,
tdps: Optional[xr.DataArray] = None,
huss: Optional[xr.DataArray] = None,
ps: Optional[xr.DataArray] = None,
ice_thresh: Optional[str] = None,
method: str = "sonntag90",
invalid_values: str = "clip",
) -> xr.DataArray:
r"""Relative humidity.
Compute relative humidity from temperature and either dewpoint temperature or specific humidity and pressure through
the saturation vapor pressure.
Parameters
----------
tas : xr.DataArray
Temperature array
tdps : xr.DataArray
Dewpoint temperature, if specified, overrides huss and ps.
huss : xr.DataArray
Specific humidity.
ps : xr.DataArray
Air Pressure.
ice_thresh : str
Threshold temperature under which to switch to equations in reference to ice instead of water.
If None (default) everything is computed with reference to water. Does nothing if 'method' is "bohren98".
method : {"bohren98", "goffgratch46", "sonntag90", "tetens30", "wmo08"}
Which method to use, see notes of this function and of `saturation_vapor_pressure`.
invalid_values : {"clip", "mask", None}
What to do with values outside the 0-100 range. If "clip" (default), clips everything to 0 - 100,
if "mask", replaces values outside the range by np.nan, and if `None`, does nothing.
Returns
-------
xr.DataArray, [%]
Relative humidity.
Notes
-----
In the following, let :math:`T`, :math:`T_d`, :math:`q` and :math:`p` be the temperature,
the dew point temperature, the specific humidity and the air pressure.
**For the "bohren98" method** : This method does not use the saturation vapor pressure directly,
but rather uses an approximation of the ratio of :math:`\frac{e_{sat}(T_d)}{e_{sat}(T)}`.
With :math:`L` the enthalpy of vaporization of water and :math:`R_w` the gas constant for water vapor,
the relative humidity is computed as:
.. math::
RH = e^{\frac{-L (T - T_d)}{R_wTT_d}}
From [BohrenAlbrecht1998]_, formula taken from [Lawrence2005]_. :math:`L = 2.501\times 10^{6}` J kg-1, exact for :math:`T = 273.15` K, is used.
**Other methods**: With :math:`w`, :math:`w_{sat}`, :math:`e_{sat}` the mixing ratio,
the saturation mixing ratio and the saturation vapor pressure.
If the dewpoint temperature is given, relative humidity is computed as:
.. math::
RH = 100\frac{e_{sat}(T_d)}{e_{sat}(T)}
Otherwise, the specific humidity and the air pressure must be given so relative humidity can be computed as:
.. math::
RH = 100\frac{w}{w_{sat}}
w = \frac{q}{1-q}
w_{sat} = 0.622\frac{e_{sat}}{P - e_{sat}}
The methods differ by how :math:`e_{sat}` is computed. See the doc of :py:meth:`xclim.core.utils.saturation_vapor_pressure`.
References
----------
.. [Lawrence2005] Lawrence, M.G. (2005). The Relationship between Relative Humidity and the Dewpoint Temperature in Moist Air: A Simple Conversion and Applications. Bull. Amer. Meteor. Soc., 86, 225–234, https://doi.org/10.1175/BAMS-86-2-225
.. [BohrenAlbrecht1998] Craig F. Bohren, Bruce A. Albrecht. Atmospheric Thermodynamics. Oxford University Press, 1998.
"""
if method in ("bohren98", "BA90"):
if tdps is None:
raise ValueError("To use method 'bohren98' (BA98), dewpoint must be given.")
tdps = convert_units_to(tdps, "degK")
tas = convert_units_to(tas, "degK")
L = 2.501e6
Rw = 461.5
hurs = 100 * np.exp(-L * (tas - tdps) / (Rw * tas * tdps)) # type: ignore
elif tdps is not None:
e_sat_dt = saturation_vapor_pressure(
tas=tdps, ice_thresh=ice_thresh, method=method
)
e_sat_t = saturation_vapor_pressure(
tas=tas, ice_thresh=ice_thresh, method=method
)
hurs = 100 * e_sat_dt / e_sat_t # type: ignore
else:
ps = convert_units_to(ps, "Pa")
huss = convert_units_to(huss, "")
tas = convert_units_to(tas, "degK")
e_sat = saturation_vapor_pressure(tas=tas, ice_thresh=ice_thresh, method=method)
w = huss / (1 - huss)
w_sat = 0.62198 * e_sat / (ps - e_sat) # type: ignore
hurs = 100 * w / w_sat
if invalid_values == "clip":
hurs = hurs.clip(0, 100)
elif invalid_values == "mask":
hurs = hurs.where((hurs <= 100) & (hurs >= 0))
hurs.attrs["units"] = "%"
return hurs
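# --- Sanity-check sketch (synthetic values, hypothetical helper name): when the
# dewpoint equals the air temperature, any e_sat-based method returns 100 %.
def _demo_relative_humidity():
    tas = xr.DataArray([293.15], dims="time", attrs={"units": "K"})
    tdps = xr.DataArray([293.15], dims="time", attrs={"units": "K"})
    return relative_humidity(tas=tas, tdps=tdps, method="wmo08")  # [100.0] %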
@declare_units(
tas="[temperature]",
hurs="[]",
ps="[pressure]",
ice_thresh="[temperature]",
)
def specific_humidity(
tas: xr.DataArray,
hurs: xr.DataArray,
ps: xr.DataArray,
ice_thresh: Optional[str] = None,
method: str = "sonntag90",
invalid_values: Optional[str] = None,
) -> xr.DataArray:
r"""Specific humidity from temperature, relative humidity and pressure.
Parameters
----------
tas : xr.DataArray
Temperature array
hurs : xr.DataArray
Relative Humidity.
ps : xr.DataArray
Air Pressure.
ice_thresh : str
Threshold temperature under which to switch to equations in reference to ice instead of water.
If None (default) everything is computed with reference to water.
method : {"goffgratch46", "sonntag90", "tetens30", "wmo08"}
Which method to use, see notes of this function and of `saturation_vapor_pressure`.
invalid_values : {"clip", "mask", None}
What to do with values larger than the saturation specific humidity or lower than 0.
If "clip", clips everything to 0 - q_sat;
if "mask", replaces values outside the range by np.nan;
if None (default), does nothing.
Returns
-------
xarray.DataArray, [dimensionless]
Specific humidity.
Notes
-----
In the following, let :math:`T`, :math:`hurs` (in %) and :math:`p` be the temperature,
the relative humidity and the air pressure. With :math:`w`, :math:`w_{sat}`, :math:`e_{sat}` the mixing ratio,
the saturation mixing ratio and the saturation vapor pressure, specific humidity :math:`q` is computed as:
.. math::
w_{sat} = 0.622\frac{e_{sat}}{P - e_{sat}}
w = w_{sat} * hurs / 100
q = w / (1 + w)
The methods differ by how :math:`e_{sat}` is computed. See the doc of `xclim.core.utils.saturation_vapor_pressure`.
If `invalid_values` is not `None`, the saturation specific humidity :math:`q_{sat}` is computed as:
.. math::
q_{sat} = w_{sat} / (1 + w_{sat})
"""
ps = convert_units_to(ps, "Pa")
hurs = convert_units_to(hurs, "")
tas = convert_units_to(tas, "degK")
e_sat = saturation_vapor_pressure(tas=tas, ice_thresh=ice_thresh, method=method)
w_sat = 0.62198 * e_sat / (ps - e_sat) # type: ignore
w = w_sat * hurs
q = w / (1 + w)
if invalid_values is not None:
q_sat = w_sat / (1 + w_sat)
if invalid_values == "clip":
q = q.clip(0, q_sat)
elif invalid_values == "mask":
q = q.where((q <= q_sat) & (q >= 0))
q.attrs["units"] = ""
return q
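# --- Worked sketch (values assumed, hypothetical helper name): at 20 degC,
# 100 kPa and 50 % relative humidity, e_sat ~= 2.34 kPa and w_sat ~= 0.0149,
# so q ~= 0.0074 (about 7.4 g/kg).
def _demo_specific_humidity():
    tas = xr.DataArray([293.15], dims="time", attrs={"units": "K"})
    hurs = xr.DataArray([50.0], dims="time", attrs={"units": "%"})
    ps = xr.DataArray([100000.0], dims="time", attrs={"units": "Pa"})
    return specific_humidity(tas=tas, hurs=hurs, ps=ps)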
@declare_units(pr="[precipitation]", tas="[temperature]", thresh="[temperature]")
def snowfall_approximation(
pr: xr.DataArray,
tas: xr.DataArray,
thresh: str = "0 degC",
method: str = "binary",
) -> xr.DataArray:
"""Snowfall approximation from total precipitation and temperature.
Solid precipitation estimated from precipitation and temperature according to a given method.
Parameters
----------
pr : xarray.DataArray
Mean daily precipitation flux.
tas : xarray.DataArray, optional
Mean, maximum, or minimum daily temperature.
thresh : str
Threshold temperature, used by method "binary".
method : {"binary", "brown", "auer"}
Which method to use when approximating snowfall from total precipitation. See notes.
Returns
-------
xarray.DataArray, [same units as pr]
Solid precipitation flux.
Notes
-----
The following methods are available to approximate snowfall and are drawn from the
Canadian Land Surface Scheme (CLASS, [Verseghy09]_).
- ``'binary'`` : When the temperature is under the freezing threshold, precipitation
is assumed to be solid. The method is agnostic to the type of temperature used
(mean, maximum or minimum).
- ``'brown'`` : The precipitation phase transitions linearly from solid to liquid
over a 2°C range above the freezing threshold.
- ``'auer'`` : The precipitation phase transitions from solid to liquid following a
degree-six polynomial over a 6°C range above the freezing threshold.
References
----------
.. [Verseghy09] Diana Verseghy (2009), CLASS – The Canadian Land Surface Scheme (Version 3.4), Technical
Documentation (Version 1.1), Environment Canada, Climate Research Division, Science and Technology Branch.
https://gitlab.com/cccma/classic/-/blob/master/src/atmosphericVarsCalc.f90
"""
if method == "binary":
thresh = convert_units_to(thresh, tas)
prsn = pr.where(tas <= thresh, 0)
elif method == "brown":
# Freezing point + 2C in the native units
upper = convert_units_to(convert_units_to(thresh, "degC") + 2, tas)
thresh = convert_units_to(thresh, tas)
# Interpolate fraction over temperature (in units of tas)
t = xr.DataArray(
[-np.inf, thresh, upper, np.inf], dims=("tas",), attrs={"units": "degC"}
)
fraction = xr.DataArray([1.0, 1.0, 0.0, 0.0], dims=("tas",), coords={"tas": t})
# Multiply precip by snowfall fraction
prsn = pr * fraction.interp(tas=tas, method="linear")
elif method == "auer":
dtas = convert_units_to(tas, "degK") - convert_units_to(thresh, "degK")
# Create nodes for the snowfall fraction: -inf, thresh, ..., thresh+6, inf [degC]
t = np.concatenate(
[[-273.15], np.linspace(0, 6, 100, endpoint=False), [6, 1e10]]
)
t = xr.DataArray(t, dims="tas", name="tas", coords={"tas": t})
# The polynomial coefficients, valid between thresh and thresh + 6 (defined in CLASS)
coeffs = xr.DataArray(
[100, 4.6664, -15.038, -1.5089, 2.0399, -0.366, 0.0202],
dims=("degree",),
coords={"degree": range(7)},
)
fraction = xr.polyval(t.tas, coeffs).clip(0, 100) / 100
fraction[0] = 1
fraction[-2:] = 0
| else:
raise ValueError(f"Method {method} not one of 'binary', 'brown' or 'auer'.")
prsn.attrs["units"] = pr.attrs["units"]
return prsn
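# --- Illustrative sketch (synthetic data, hypothetical helper name): with the
# "binary" method, all precipitation below the threshold temperature is solid,
# and none above it.
def _demo_snowfall_approximation():
    pr = xr.DataArray([1.0, 1.0], dims="time", attrs={"units": "kg m-2 s-1"})
    tas = xr.DataArray([-5.0, 5.0], dims="time", attrs={"units": "degC"})
    return snowfall_approximation(pr, tas, method="binary")  # [1.0, 0.0]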
@declare_units(pr="[precipitation]", tas="[temperature]", thresh="[temperature]")
def rain_approximation(
pr: xr.DataArray,
tas: xr.DataArray,
thresh: str = "0 degC",
method: str = "binary",
) -> xr.DataArray:
"""Rainfall approximation from total precipitation and temperature.
Liquid precipitation estimated from precipitation and temperature according to a given method.
This is a convenience method based on :py:func:`snowfall_approximation`, see the latter for details.
Parameters
----------
pr : xarray.DataArray
Mean daily precipitation flux.
tas : xarray.DataArray, optional
Mean, maximum, or minimum daily temperature.
thresh : str
Threshold temperature, used by method "binary".
method : {"binary", "brown", "auer"}
Which method to use when approximating snowfall from total precipitation. See notes.
Returns
-------
xarray.DataArray, [same units as pr]
Liquid precipitation rate.
Notes
-----
This method computes the snowfall approximation and subtracts it from the total
precipitation to estimate the liquid rain precipitation.
See also
--------
snowfall_approximation
"""
prra = pr - snowfall_approximation(pr, tas, thresh=thresh, method=method)
prra.attrs["units"] = pr.attrs["units"]
return prra
@declare_units(
tas="[temperature]",
sfcWind="[speed]",
)
def wind_chill_index(
tas: xr.DataArray,
sfcWind: xr.DataArray,
method: str = "CAN",
mask_invalid: bool = True,
):
r"""Wind chill index.
The Wind Chill Index is an estimation of how cold the weather feels to the average person.
It is computed from the air temperature and the 10-m wind. As defined by the Environment and Climate Change Canada ([MVSZ15]_),
two equations exist, the conventional one and one for slow winds (usually < 5 km/h), see Notes.
Parameters
----------
tas : xarray.DataArray
Surface air temperature.
sfcWind : xarray.DataArray
Surface wind speed (10 m).
method : {'CAN', 'US'}
If "CAN" (default), a "slow wind" equation is used where winds are slower than 5 km/h, see Notes.
mask_invalid : bool
Whether to mask values when the inputs are outside their validity range.
If True (default), points where the temperature is above a threshold are masked.
The threshold is 0°C for the canadian method and 50°F for the american one.
With the latter method, points where sfcWind < 3 mph are also masked.
Returns
-------
xarray.DataArray, [degC]
Wind Chill Index.
Notes
-----
Following the calculations of Environment and Climate Change Canada, this function switches from the standardized index
to another one for slow winds. The standard index is the same as used by the National Weather Service of the USA. Given
a temperature at surface :math:`T` (in °C) and 10-m wind speed :math:`V` (in km/h), the Wind Chill Index :math:`W` (dimensionless)
is computed as:
.. math::
W = 13.12 + 0.6215 \cdot T - 11.37 \cdot V^{0.16} + 0.3965 \cdot T \cdot V^{0.16}
Under slow winds (:math:`V < 5` km/h), and using the canadian method, it becomes:
.. math::
W = T + \frac{-1.59 + 0.1345 * T}{5} * V
Both equations are invalid for temperature over 0°C in the canadian method.
The american Wind Chill Temperature index (WCT), as defined by USA's National Weather Service, is computed when
`method='US'`. In that case, the maximal valid temperature is 50°F (10 °C) and minimal wind speed is 3 mph (4.8 km/h).
References
----------
.. [MVSZ15] Éva Mekis, Lucie A. Vincent, Mark W. Shephard & Xuebin Zhang (2015) Observed Trends in Severe Weather Conditions Based on Humidex, Wind Chill, and Heavy Rainfall Events in Canada for 1953–2012, Atmosphere-Ocean, 53:4, 383-397, DOI: 10.1080/07055900.2015.1086970
Osczevski, R., & Bluestein, M. (2005). The New Wind Chill Equivalent Temperature Chart. Bulletin of the American Meteorological Society, 86(10), 1453–1458. https://doi.org/10.1175/BAMS-86-10-1453
.. [NWS] Wind Chill Questions, Cold Resources, National Weather Service, retrieved 25-05-21. https://www.weather.gov/safety/cold-faqs
"""
tas = convert_units_to(tas, "degC")
sfcWind = convert_units_to(sfcWind, "km/h")
V = sfcWind ** 0.16
W = 13.12 + 0.6215 * tas - 11.37 * V + 0.3965 * tas * V
if method.upper() == "CAN":
W = xr.where(sfcWind < 5, tas + sfcWind * (-1.59 + 0.1345 * tas) / 5, W)
elif method.upper() != "US":
raise ValueError(f"`method` must be one of 'US' and 'CAN'. Got '{method}'.")
if mask_invalid:
mask = {"CAN": tas <= 0, "US": (sfcWind > 4.828032) & (tas <= 10)}
W = W.where(mask[method.upper()])
W.attrs["units"] = "degC"
return W
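# --- Worked point check (assumed values, hypothetical helper name): at -10 degC
# with a 20 km/h wind, V = 20**0.16 ~= 1.615, so
# W = 13.12 - 6.215 - 11.37*1.615 - 3.965*1.615 ~= -17.9.
def _demo_wind_chill_index():
    tas = xr.DataArray([-10.0], dims="time", attrs={"units": "degC"})
    sfcWind = xr.DataArray([20.0], dims="time", attrs={"units": "km/h"})
    return wind_chill_index(tas, sfcWind)  # ~[-17.9] degC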
@declare_units(
delta_tas="[temperature]",
pr_baseline="[precipitation]",
)
def clausius_clapeyron_scaled_precipitation(
delta_tas: xr.DataArray,
pr_baseline: xr.DataArray,
cc_scale_factor: float = 1.07,
) -> xr.DataArray:
r"""Scale precipitation according to the Clausius-Clapeyron relation.
Parameters
----------
delta_tas : xarray.DataArray
Difference in temperature between a baseline climatology and another climatology.
pr_baseline : xarray.DataArray
Baseline precipitation to adjust with Clausius-Clapeyron.
cc_scale_factor : float (default = 1.07)
Clausius-Clapeyron scale factor.
Returns
-------
DataArray
Baseline precipitation scaled to other climatology using Clausius-Clapeyron relationship.
Notes
-----
The Clausius-Clapeyron equation for water vapor under typical atmospheric conditions states that the saturation
water vapor pressure :math:`e_s` changes approximately exponentially with temperature
.. math::
e_s(T + \Delta T) \approx e_s(T) \times 1.07^{\Delta T}
This function assumes that precipitation can be scaled by the same factor.
Warnings
--------
Make sure that `delta_tas` is computed over a baseline compatible with `pr_baseline`. So for example,
if `delta_tas` is the climatological difference between a baseline and a future period, then `pr_baseline`
should be precipitations over a period within the same baseline.
"""
# Convert the temperature difference to delta degrees Celsius.
delta_tas = convert_units_to(delta_tas, "delta_degreeC")
# Calculate scaled precipitation.
pr_out = pr_baseline * (cc_scale_factor ** delta_tas)
pr_out.attrs["units"] = pr_baseline.attrs["units"]
return pr_out
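# --- Worked example (synthetic numbers, hypothetical helper name): a +2 degC
# warming scales precipitation by 1.07**2 ~= 1.145, i.e. roughly a 14.5 % increase.
def _demo_cc_scaling():
    delta_tas = xr.DataArray([2.0], dims="time", attrs={"units": "delta_degC"})
    pr = xr.DataArray([1.0], dims="time", attrs={"units": "kg m-2 s-1"})
    return clausius_clapeyron_scaled_precipitation(delta_tas, pr)  # ~[1.145]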
@declare_units(tasmin="[temperature]", tasmax="[temperature]", tas="[temperature]")
def potential_evapotranspiration(
tasmin: Optional[xr.DataArray] = None,
tasmax: Optional[xr.DataArray] = None,
tas: Optional[xr.DataArray] = None,
method: str = "BR65",
) -> xr.DataArray:
"""Potential evapotranspiration.
The potential for water evaporation from soil and transpiration by plants if the water supply is
sufficient, according to a given method.
Parameters
----------
tasmin : xarray.DataArray
Minimum daily temperature.
tasmax : xarray.DataArray
Maximum daily temperature.
tas : xarray.DataArray
Mean daily temperature.
method : {"baierrobertson65", "BR65", "hargreaves85", "HG85", "thornthwaite48", "TW48"}
Which method to use, see notes.
Returns
-------
xarray.DataArray, [kg m-2 s-1]
Potential evapotranspiration flux.
Notes
-----
Available methods are:
- "baierrobertson65" or "BR65", based on [baierrobertson65]_. Requires tasmin and tasmax, daily [D] freq.
- "hargreaves85" or "HG85", based on [hargreaves85]_. Requires tasmin and tasmax, daily [D] freq. (optional: tas can be given in addition of tasmin and tasmax).
- "thornthwaite48" or "TW48", based on [thornthwaite48]_. Requires tasmin and tasmax, monthly [MS] or daily [D] freq. (optional: tas can be given instead of tasmin and tasmax).
References
----------
.. [baierrobertson65] Baier, W., & Robertson, G. W. (1965). Estimation of latent evaporation from simple weather observations. Canadian journal of plant science, 45(3), 276-284.
.. [hargreaves85] Hargreaves, G. H., & Samani, Z. A. (1985). Reference crop evapotranspiration from temperature. Applied engineering in agriculture, 1(2), 96-99.
.. [thornthwaite48] Thornthwaite, C. W. (1948). An approach toward a rational classification of climate. Geographical review, 38(1), 55-94.
"""
if method in ["baierrobertson65", "BR65"]:
tasmin = convert_units_to(tasmin, "degF")
tasmax = convert_units_to(tasmax, "degF")
latr = (tasmin.lat * np.pi) / 180
gsc = 0.082 # MJ/m2/min
# julian day fraction
jd_frac = (datetime_to_decimal_year(tasmin.time) % 1) * 2 * np.pi
ds = 0.409 * np.sin(jd_frac - 1.39)
dr = 1 + 0.033 * np.cos(jd_frac)
omega = np.arccos(-np.tan(latr) * np.tan(ds))
re = (
(24 * 60 / np.pi)
* gsc
* dr
* (
omega * np.sin(latr) * np.sin(ds)
+ np.cos(latr) * np.cos(ds) * np.sin(omega)
)
) # MJ/m2/day
re = re / 4.1864e-2 # cal/cm2/day
# Baier and Robertson (1965) formula
out = 0.094 * (
-87.03 + 0.928 * tasmax + 0.933 * (tasmax - tasmin) + 0.0486 * re
)
out = out.clip(0)
elif method in ["hargreaves85", "HG85"]:
tasmin = convert_units_to(tasmin, "degC")
tasmax = convert_units_to(tasmax, "degC")
if tas is None:
tas = (tasmin + tasmax) / 2
else:
tas = convert_units_to(tas, "degC")
latr = (tasmin.lat * np.pi) / 180
gsc = 0.082 # MJ/m2/min
lv = 2.5 # MJ/kg
# julian day fraction
jd_frac = (datetime_to_decimal_year(tasmin.time) % 1) * 2 * np.pi
ds = 0.409 * np.sin(jd_frac - 1.39)
dr = 1 + 0.033 * np.cos(jd_frac)
omega = np.arccos(-np.tan(latr) * np.tan(ds))
ra = (
(24 * 60 / np.pi)
* gsc
* dr
* (
omega * np.sin(latr) * np.sin(ds)
+ np.cos(latr) * np.cos(ds) * np.sin(omega)
)
) # MJ/m2/day
# Hargreaves and Samani (1985) formula
out = (0.0023 * ra * (tas + 17.8) * (tasmax - tasmin) ** 0.5) / lv
out = out.clip(0)
elif method in ["thornthwaite48", "TW48"]:
if tas is None:
tasmin = convert_units_to(tasmin, "degC")
tasmax = convert_units_to(tasmax, "degC")
tas = (tasmin + tasmax) / 2
else:
tas = convert_units_to(tas, "degC")
tas = tas.clip(0)
tas = tas.resample(time="MS").mean(dim="time")
latr = (tas.lat * np.pi) / 180 # rad
start = "-".join(
[
str(tas.time[0].dt.year.values),
"{:02d}".format(tas.time[0].dt.month.values),
"01",
]
)
end = "-".join(
[
str(tas.time[-1].dt.year.values),
"{:02d}".format(tas.time[-1].dt.month.values),
str(tas.time[-1].dt.daysinmonth.values),
]
)
time_v = xr.DataArray(
date_range(start, end, freq="D", calendar="standard"),
dims="time",
name="time",
)
# julian day fraction
jd_frac = (datetime_to_decimal_year(time_v) % 1) * 2 * np.pi
ds = 0.409 * np.sin(jd_frac - 1.39)
omega = np.arccos(-np.tan(latr) * np.tan(ds)) * 180 / np.pi # degrees
# monthly-mean daytime length (multiples of 12 hours)
dl = 2 * omega / (15 * 12)
dl_m = dl.resample(time="MS").mean(dim="time")
# annual heat index
id_m = (tas / 5) ** 1.514
id_y = id_m.resample(time="YS").sum(dim="time")
tas_idy_a = []
for base_time, indexes in tas.resample(time="YS").groups.items():
tas_y = tas.isel(time=indexes)
id_v = id_y.sel(time=base_time)
a = 6.75e-7 * id_v ** 3 - 7.71e-5 * id_v ** 2 + 0.01791 * id_v + 0.49239
frac = (10 * tas_y / id_v) ** a
tas_idy_a.append(frac)
tas_idy_a = xr.concat(tas_idy_a, dim="time")
# Thornthwaite (1948) formula
out = 1.6 * dl_m * tas_idy_a # cm/month
out = 10 * out # mm/month
else:
raise NotImplementedError(f"'{method}' method is not implemented.")
out.attrs["units"] = "mm"
return amount2rate(out, out_units="kg m-2 s-1") | # Convert snowfall fraction coordinates to native tas units
prsn = pr * fraction.interp(tas=dtas, method="linear")
|
reports_test.go | package operator
import (
"encoding/json"
"fmt"
"testing"
"time"
"github.com/kube-reporting/metering-operator/pkg/operator/reporting"
metering "github.com/kube-reporting/metering-operator/pkg/apis/metering/v1"
meteringUtil "github.com/kube-reporting/metering-operator/pkg/apis/metering/v1/util"
"github.com/kube-reporting/metering-operator/test/testhelpers"
"github.com/sirupsen/logrus"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
func TestGetNextReportPeriod(t *testing.T) {
baseTime := time.Date(2018, time.July, 1, 0, 0, 0, 0, time.UTC)
tests := map[string]struct {
period metering.ReportPeriod
expectError bool
expectReportPeriods []reportPeriod
}{
"hourly": {
period: metering.ReportPeriodHourly,
expectReportPeriods: []reportPeriod{
{
periodStart: baseTime,
periodEnd: time.Date(2018, time.July, 1, 1, 0, 0, 0, time.UTC),
},
{
periodStart: time.Date(2018, time.July, 1, 1, 0, 0, 0, time.UTC),
periodEnd: time.Date(2018, time.July, 1, 2, 0, 0, 0, time.UTC),
},
},
},
"daily": {
period: metering.ReportPeriodDaily,
expectReportPeriods: []reportPeriod{
{
periodStart: baseTime,
periodEnd: time.Date(2018, time.July, 2, 0, 0, 0, 0, time.UTC),
},
{
periodStart: time.Date(2018, time.July, 2, 0, 0, 0, 0, time.UTC),
periodEnd: time.Date(2018, time.July, 3, 0, 0, 0, 0, time.UTC),
},
},
},
"weekly": {
period: metering.ReportPeriodWeekly,
expectReportPeriods: []reportPeriod{
{
periodStart: baseTime,
periodEnd: time.Date(2018, time.July, 8, 0, 0, 0, 0, time.UTC),
},
{
periodStart: time.Date(2018, time.July, 8, 0, 0, 0, 0, time.UTC),
periodEnd: time.Date(2018, time.July, 15, 0, 0, 0, 0, time.UTC),
},
},
},
"monthly": {
period: metering.ReportPeriodMonthly,
expectReportPeriods: []reportPeriod{
{
periodStart: baseTime,
periodEnd: time.Date(2018, time.August, 1, 0, 0, 0, 0, time.UTC),
},
},
},
}
for name, test := range tests {
t.Run(name, func(t *testing.T) {
apiSched := &metering.ReportSchedule{
Period: test.period,
// Normally only one is set, but we simply use a zero value
// for each to make it easier in tests.
Hourly: &metering.ReportScheduleHourly{},
Daily: &metering.ReportScheduleDaily{},
Weekly: &metering.ReportScheduleWeekly{},
Monthly: &metering.ReportScheduleMonthly{},
}
schedule, err := getSchedule(apiSched)
require.NoError(t, err)
lastScheduled := baseTime
for _, expectedReportPeriod := range test.expectReportPeriods {
reportPeriod := getNextReportPeriod(schedule, test.period, lastScheduled)
assert.Equal(t, &expectedReportPeriod, reportPeriod)
lastScheduled = expectedReportPeriod.periodEnd
}
})
}
}
func | (t *testing.T) {
const (
testNamespace = "default"
testReportName = "test-report"
testQueryName = "test-query"
testReportMessage = "test-message"
)
schedule := &metering.ReportSchedule{
Period: metering.ReportPeriodCron,
Cron: &metering.ReportScheduleCron{Expression: "5 4 * * *"},
}
reportStart := &time.Time{}
reportEndTmp := reportStart.AddDate(0, 1, 0)
reportEnd := &reportEndTmp
testTable := []struct {
name string
report *metering.Report
expectFinished bool
}{
{
name: "new report returns false",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportStart, reportEnd, metering.ReportStatus{}, nil, false, nil),
expectFinished: false,
},
{
name: "finished status on run-once report returns true",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportStart, reportEnd, metering.ReportStatus{
Conditions: []metering.ReportCondition{
*meteringUtil.NewReportCondition(metering.ReportRunning, v1.ConditionFalse, meteringUtil.ReportFinishedReason, testReportMessage),
},
}, nil, false, nil),
expectFinished: true,
},
{
name: "unset reportingEnd returns false",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportStart, nil, metering.ReportStatus{
Conditions: []metering.ReportCondition{
*meteringUtil.NewReportCondition(metering.ReportRunning, v1.ConditionFalse, meteringUtil.ReportFinishedReason, testReportMessage),
},
}, schedule, false, nil),
expectFinished: false,
},
{
name: "reportingEnd > lastReportTime returns false",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportStart, reportEnd, metering.ReportStatus{
Conditions: []metering.ReportCondition{
*meteringUtil.NewReportCondition(metering.ReportRunning, v1.ConditionFalse, meteringUtil.ReportFinishedReason, testReportMessage),
},
LastReportTime: &metav1.Time{Time: reportStart.AddDate(0, 0, 0)},
}, schedule, false, nil),
expectFinished: false,
},
{
name: "reportingEnd < lastReportTime returns true",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportStart, reportEnd, metering.ReportStatus{
Conditions: []metering.ReportCondition{
*meteringUtil.NewReportCondition(metering.ReportRunning, v1.ConditionFalse, meteringUtil.ReportFinishedReason, testReportMessage),
},
LastReportTime: &metav1.Time{Time: reportStart.AddDate(0, 2, 0)},
}, schedule, false, nil),
expectFinished: true,
},
{
name: "when status running is false and reason is Scheduled return false",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportStart, reportEnd, metering.ReportStatus{
Conditions: []metering.ReportCondition{
*meteringUtil.NewReportCondition(metering.ReportRunning, v1.ConditionFalse, meteringUtil.ScheduledReason, testReportMessage),
},
}, schedule, false, nil),
expectFinished: false,
},
{
name: "when status running is true and reason is Scheduled return false",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportStart, reportEnd, metering.ReportStatus{
Conditions: []metering.ReportCondition{
*meteringUtil.NewReportCondition(metering.ReportRunning, v1.ConditionTrue, meteringUtil.ScheduledReason, testReportMessage),
},
}, schedule, false, nil),
expectFinished: false,
},
{
name: "when status running is false and reason is InvalidReport return false",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportStart, reportEnd, metering.ReportStatus{
Conditions: []metering.ReportCondition{
*meteringUtil.NewReportCondition(metering.ReportRunning, v1.ConditionFalse, meteringUtil.InvalidReportReason, testReportMessage),
},
}, schedule, false, nil),
expectFinished: false,
},
{
name: "when status running is true and reason is InvalidReport return false",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportStart, reportEnd, metering.ReportStatus{
Conditions: []metering.ReportCondition{
*meteringUtil.NewReportCondition(metering.ReportRunning, v1.ConditionTrue, meteringUtil.InvalidReportReason, testReportMessage),
},
}, schedule, false, nil),
expectFinished: false,
},
{
name: "when status running is false and reason is RunImmediately return false",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportStart, reportEnd, metering.ReportStatus{
Conditions: []metering.ReportCondition{
*meteringUtil.NewReportCondition(metering.ReportRunning, v1.ConditionFalse, meteringUtil.RunImmediatelyReason, testReportMessage),
},
}, schedule, false, nil),
expectFinished: false,
},
{
name: "when status running is true and reason is RunImmediately return false",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportStart, reportEnd, metering.ReportStatus{
Conditions: []metering.ReportCondition{
*meteringUtil.NewReportCondition(metering.ReportRunning, v1.ConditionTrue, meteringUtil.RunImmediatelyReason, testReportMessage),
},
}, schedule, false, nil),
expectFinished: false,
},
}
for _, testCase := range testTable {
var mockLogger = logrus.New()
testCase := testCase
t.Run(testCase.name, func(t *testing.T) {
runningCond := isReportFinished(mockLogger, testCase.report)
assert.Equalf(t, testCase.expectFinished, runningCond, "expected the report would return '%t', but got '%t'", testCase.expectFinished, runningCond)
})
}
}
func TestValidateReport(t *testing.T) {
const (
testNamespace = "default"
testReportName = "test-report"
testQueryName = "test-query"
testInvalidQueryName = "invalid-query"
testInvalidQueryName2 = "invalid-query2"
testNonExistentQueryName = "does-not-exist"
)
ds1 := testhelpers.NewReportDataSource("datasource1", testNamespace)
ds1.Status.TableRef = v1.LocalObjectReference{Name: "initialized-datasource"}
// ds2 is uninitialized
ds2 := testhelpers.NewReportDataSource("datasource2", testNamespace)
newDefault := func(s string) *json.RawMessage {
v := json.RawMessage(s)
return &v
}
testValidQuery := &metering.ReportQuery{
ObjectMeta: metav1.ObjectMeta{
Name: testQueryName,
Namespace: testNamespace,
},
Spec: metering.ReportQuerySpec{
Inputs: []metering.ReportQueryInputDefinition{
{
Name: "ds",
Type: "ReportDataSource",
Required: true,
Default: newDefault((`"` + ds1.Name + `"`)),
},
},
},
}
testInvalidQuery := &metering.ReportQuery{
ObjectMeta: metav1.ObjectMeta{
Name: testInvalidQueryName,
Namespace: testNamespace,
},
Spec: metering.ReportQuerySpec{
Inputs: []metering.ReportQueryInputDefinition{
{
Name: "ds",
Type: "ReportDataSource",
Required: true,
Default: newDefault(`"this-does-not-exist"`),
},
},
},
}
testInvalidQuery2 := &metering.ReportQuery{
ObjectMeta: metav1.ObjectMeta{
Name: testInvalidQueryName2,
Namespace: testNamespace,
},
Spec: metering.ReportQuerySpec{
Inputs: []metering.ReportQueryInputDefinition{
{
Name: "ds",
Type: "ReportDataSource",
Required: true,
Default: newDefault((`"` + ds2.Name + `"`)),
},
},
},
}
dataSourceGetter := testhelpers.NewReportDataSourceStore([]*metering.ReportDataSource{ds1, ds2})
queryGetter := testhelpers.NewReportQueryStore([]*metering.ReportQuery{testValidQuery, testInvalidQuery, testInvalidQuery2})
reportGetter := testhelpers.NewReportStore(nil)
dependencyResolver := reporting.NewDependencyResolver(queryGetter, dataSourceGetter, reportGetter)
reportStart := &time.Time{}
reportEndTmp := reportStart.AddDate(0, 1, 0)
reportEnd := &reportEndTmp
testTable := []struct {
name string
report *metering.Report
expectErr bool
expectErrMsg string
}{
{
name: "empty spec.query returns err",
report: testhelpers.NewReport(testReportName, testNamespace, "", reportStart, reportEnd, metering.ReportStatus{}, nil, false, nil),
expectErr: true,
expectErrMsg: "must set spec.query",
},
{
name: "spec.ReportingStart > spec.ReportingEnd returns err",
report: testhelpers.NewReport(testReportName, testNamespace, testNonExistentQueryName, reportEnd, reportStart, metering.ReportStatus{}, nil, false, nil),
expectErr: true,
expectErrMsg: fmt.Sprintf("spec.reportingEnd (%s) must be after spec.reportingStart (%s)", reportStart.String(), reportEnd.String()),
},
{
name: "spec.ReportingEnd is unset and spec.RunImmediately is set returns err",
report: testhelpers.NewReport(testReportName, testNamespace, testNonExistentQueryName, reportStart, nil, metering.ReportStatus{}, nil, true, nil),
expectErr: true,
expectErrMsg: "spec.reportingEnd must be set if report.spec.runImmediately is true",
},
{
name: "spec.QueryName does not exist returns err",
report: testhelpers.NewReport(testReportName, testNamespace, testNonExistentQueryName, reportStart, reportEnd, metering.ReportStatus{}, nil, false, nil),
expectErr: true,
expectErrMsg: fmt.Sprintf("ReportQuery (%s) does not exist", testNonExistentQueryName),
},
{
name: "valid report with missing DataSource returns error",
report: testhelpers.NewReport(testReportName, testNamespace, testInvalidQueryName, reportStart, reportEnd, metering.ReportStatus{}, nil, true, nil),
expectErr: true,
expectErrMsg: fmt.Sprintf("failed to resolve ReportQuery dependencies %s: %s", testInvalidQueryName, "ReportDataSource.metering.openshift.io \"this-does-not-exist\" not found"),
},
{
name: "valid report with uninitalized DataSource returns error",
report: testhelpers.NewReport(testReportName, testNamespace, testInvalidQueryName2, reportStart, reportEnd, metering.ReportStatus{}, nil, true, nil),
expectErr: true,
expectErrMsg: fmt.Sprintf("failed to validate ReportQuery dependencies %s: ReportQueryDependencyValidationError: uninitialized ReportDataSource dependencies: %s", testInvalidQueryName2, ds2.Name),
},
{
name: "valid report with valid DataSource returns nil",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportStart, reportEnd, metering.ReportStatus{}, nil, true, nil),
expectErr: false,
expectErrMsg: "",
},
}
for _, testCase := range testTable {
testCase := testCase
noopHandler := &reporting.UninitialiedDependendenciesHandler{HandleUninitializedReportDataSource: func(ds *metering.ReportDataSource) {}}
t.Run(testCase.name, func(t *testing.T) {
_, _, err := validateReport(testCase.report, queryGetter, dependencyResolver, noopHandler)
if testCase.expectErr {
assert.EqualErrorf(t, err, testCase.expectErrMsg, "expected that validateReport would return the correct error message")
} else {
assert.NoErrorf(t, err, "expected the report would return no error, but got '%v'", err)
}
})
}
}
func TestGetReportPeriod(t *testing.T) {
const (
testNamespace = "default"
testReportName = "test-report"
testQueryName = "test-query"
)
invalidSchedule := &metering.ReportSchedule{
Period: metering.ReportPeriodCron,
Cron: nil,
}
validSchedule := &metering.ReportSchedule{
Period: metering.ReportPeriodCron,
Cron: &metering.ReportScheduleCron{Expression: "5 4 * * *"},
}
reportStart := &time.Time{}
reportEndTmp := reportStart.AddDate(0, 1, 0)
reportEnd := &reportEndTmp
lastReportTime := &metav1.Time{Time: reportStart.AddDate(0, 0, 0)}
nextReportTime := &metav1.Time{Time: reportStart.AddDate(0, 1, 0)}
testTable := []struct {
name string
report *metering.Report
expectErr bool
expectPanic bool
}{
{
name: "invalid report with an unset spec.Schedule field returns an error",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, nil, nil, metering.ReportStatus{}, nil, false, nil),
expectErr: true,
},
{
name: "valid report with an unset spec.Schedule field returns nil",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportStart, reportEnd, metering.ReportStatus{}, nil, false, nil),
expectErr: false,
},
{
name: "invalid schedule with a set spec.Schedule field returns error",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportStart, reportEnd, metering.ReportStatus{}, invalidSchedule, false, nil),
expectErr: true,
},
{
name: "valid schedule with a set spec.Schedule field and an unset Spec.Status.LastReportTime returns nil",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportStart, reportEnd, metering.ReportStatus{}, validSchedule, false, nil),
expectErr: false,
},
{
name: "valid schedule with a set spec.Schedule field and a set Spec.Status.LastReportTime returns nil",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportStart, reportEnd, metering.ReportStatus{LastReportTime: lastReportTime}, validSchedule, false, nil),
expectErr: false,
},
{
name: "valid schedule with a set spec.Schedule field and an unset Spec.Status.LastReportTime and a set Spec.ReportingStart returns nil",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportStart, reportEnd, metering.ReportStatus{}, validSchedule, false, nil),
expectErr: false,
},
{
name: "valid schedule with a set spec.Schedule field and an unset Spec.Status.LastReportTime and an unset Spec.ReportingStart returns nil",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, nil, reportEnd, metering.ReportStatus{}, validSchedule, false, nil),
expectErr: false,
},
{
name: "valid schedule with a set spec.Schedule field and an unset Spec.Status.LastReportTime and a set Spec.NextReportTime returns nil",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, nil, reportEnd, metering.ReportStatus{NextReportTime: nextReportTime}, validSchedule, false, nil),
expectErr: false,
},
{
name: "unset Spec.Schedule with reportPeriod.periodStart > reportPeriod.periodEnd returns panic",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, reportEnd, reportStart, metering.ReportStatus{NextReportTime: nextReportTime}, nil, false, nil),
expectErr: false,
expectPanic: true,
},
}
for _, testCase := range testTable {
var mockLogger = logrus.New()
testCase := testCase
t.Run(testCase.name, func(t *testing.T) {
if testCase.expectPanic {
assert.Panics(t, func() { getReportPeriod(time.Now(), mockLogger, testCase.report) }, "expected the test case would panic, but it did not")
} else {
_, err := getReportPeriod(time.Now(), mockLogger, testCase.report)
if testCase.expectErr {
assert.Error(t, err, "expected that getting the report period would return a non-nil error")
} else {
assert.Nil(t, err, "expected that getting the report period would return a nil error")
}
}
})
}
}
func TestReportsExpireAsExpected(t *testing.T) {
const (
testNamespace = "default"
testReportName = "test-report"
testQueryName = "test-query"
)
testTable := []struct {
name string
nowAdder int
report *metering.Report
}{
{
name: "expired report should be deleted",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, nil, nil, metering.ReportStatus{}, nil, false, &metav1.Duration{Duration: 1 * time.Minute}),
nowAdder: 1,
},
{
name: "not expired report should not be deleted",
report: testhelpers.NewReport(testReportName, testNamespace, testQueryName, nil, nil, metering.ReportStatus{}, nil, false, &metav1.Duration{Duration: 1 * time.Minute}),
nowAdder: -1,
},
}
for _, testCase := range testTable {
var mockLogger = logrus.New()
testCase := testCase
t.Run(testCase.name, func(t *testing.T) {
result := isReportExpired(mockLogger, testCase.report, time.Now().Add(time.Minute*1).Add(time.Second*time.Duration(testCase.nowAdder)))
assert.True(t, result, "report should expire as expected")
})
}
}
| TestIsReportFinished |
builder_scope.rs | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT | use glib::object::IsA;
use std::fmt;
glib::wrapper! {
pub struct BuilderScope(Interface<ffi::GtkBuilderScope, ffi::GtkBuilderScopeInterface>);
match fn {
get_type => || ffi::gtk_builder_scope_get_type(),
}
}
pub const NONE_BUILDER_SCOPE: Option<&BuilderScope> = None;
pub trait BuilderScopeExt: 'static {}
impl<O: IsA<BuilderScope>> BuilderScopeExt for O {}
impl fmt::Display for BuilderScope {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("BuilderScope")
}
} | |
picnic_table.py | def print_picnic(itemsDict, leftWidth, rightWidth):
print('PICNIC ITEMS'.center(leftWidth + rightWidth, '-'))
for k, v in itemsDict.items():
print(k.ljust(leftWidth, '.') + str(v).rjust(rightWidth))
picnic_items = {'sandwiches': 4, 'apples': 12, 'cups': 4, 'cookies': 8000} | print_picnic(picnic_items, 12, 5)
print_picnic(picnic_items, 20, 6) |
|
categories.js | import React from 'react';
import { Button, Typography } from '@material-ui/core';
import { connect } from 'react-redux';
import { selectCategory } from '../../store/categories.js';
const viewCategories = (props) => {
return (
<>
<Typography variant="h4" component='h4'> Browse By Category </Typography>
<Button variant="contained" color="primary" onClick={() => props.selectCategory('Music')}>Music</Button>
<Button variant="contained" color="primary" onClick={() => props.selectCategory('Books')}>Books</Button>
<Typography>{props.activeDescription}</Typography>
</>
)
}
const mapStateToProps = (state) => {
return {
activeCategory: state.categories.activeCategory,
activeDescription: state.categories.activeDescription
}
}
const mapDispatchToProps = {
selectCategory,
} | export default connect(mapStateToProps, mapDispatchToProps)(viewCategories); | |
testing.rs | use crate::contract::{
assert_max_spread, execute, instantiate, query_pair_info, query_pool, query_reverse_simulation,
query_simulation, reply,
};
use crate::error::ContractError;
use crate::mock_querier::mock_dependencies;
use cosmwasm_std::testing::{mock_env, mock_info, MOCK_CONTRACT_ADDR};
use cosmwasm_std::{
attr, to_binary, BankMsg, Coin, ContractResult, CosmosMsg, Decimal, Reply, ReplyOn, Response,
StdError, SubMsg, SubMsgExecutionResponse, Uint128, WasmMsg,
};
use cw20::{Cw20ExecuteMsg, Cw20ReceiveMsg, MinterResponse};
use terraswap::asset::{Asset, AssetInfo, PairInfo};
use terraswap::pair::{
Cw20HookMsg, ExecuteMsg, InstantiateMsg, PoolResponse, ReverseSimulationResponse,
SimulationResponse,
};
use terraswap::token::InstantiateMsg as TokenInstantiateMsg;
#[test]
fn proper_initialization() {
let mut deps = mock_dependencies(&[]);
let msg = InstantiateMsg {
asset_infos: [
AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
AssetInfo::Token {
contract_addr: "asset0000".to_string(),
},
],
token_code_id: 10u64,
};
// we can just call .unwrap() to assert this was a success
let env = mock_env();
let info = mock_info("addr0000", &[]);
let res = instantiate(deps.as_mut(), env, info, msg).unwrap();
assert_eq!(
res.messages,
vec![SubMsg {
msg: WasmMsg::Instantiate {
code_id: 10u64,
msg: to_binary(&TokenInstantiateMsg {
name: "terraswap liquidity token".to_string(),
symbol: "uLP".to_string(),
decimals: 6,
initial_balances: vec![],
mint: Some(MinterResponse {
minter: MOCK_CONTRACT_ADDR.to_string(),
cap: None,
}),
})
.unwrap(),
funds: vec![],
label: "".to_string(),
admin: None,
}
.into(),
gas_limit: None,
id: 1,
reply_on: ReplyOn::Success,
}]
);
// store liquidity token
let reply_msg = Reply {
id: 1,
result: ContractResult::Ok(SubMsgExecutionResponse {
events: vec![],
data: Some(
vec![
10, 13, 108, 105, 113, 117, 105, 100, 105, 116, 121, 48, 48, 48, 48,
]
.into(),
),
}),
};
let _res = reply(deps.as_mut(), mock_env(), reply_msg).unwrap();
// it worked, let's query the state
let pair_info: PairInfo = query_pair_info(deps.as_ref()).unwrap();
assert_eq!("liquidity0000", pair_info.liquidity_token.as_str());
assert_eq!(
pair_info.asset_infos,
[
AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
AssetInfo::Token {
contract_addr: "asset0000".to_string()
}
]
);
}
#[test]
fn provide_liquidity() {
let mut deps = mock_dependencies(&[Coin {
denom: "uusd".to_string(),
amount: Uint128::from(200u128),
}]);
deps.querier.with_token_balances(&[
(
&"liquidity0000".to_string(),
&[(&MOCK_CONTRACT_ADDR.to_string(), &Uint128::zero())],
),
(&"asset0000".to_string(), &[]),
]);
let msg = InstantiateMsg {
asset_infos: [
AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
AssetInfo::Token {
contract_addr: "asset0000".to_string(),
},
],
token_code_id: 10u64,
};
let env = mock_env();
let info = mock_info("addr0000", &[]);
// we can just call .unwrap() to assert this was a success
let _res = instantiate(deps.as_mut(), env, info, msg).unwrap();
// store liquidity token
let reply_msg = Reply {
id: 1,
result: ContractResult::Ok(SubMsgExecutionResponse {
events: vec![],
data: Some(
vec![
10, 13, 108, 105, 113, 117, 105, 100, 105, 116, 121, 48, 48, 48, 48,
]
.into(),
),
}),
};
let _res = reply(deps.as_mut(), mock_env(), reply_msg).unwrap();
// successfully provide liquidity for the exist pool
let msg = ExecuteMsg::ProvideLiquidity {
assets: [
Asset {
info: AssetInfo::Token {
contract_addr: "asset0000".to_string(),
},
amount: Uint128::from(100u128),
},
Asset {
info: AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
amount: Uint128::from(100u128),
},
],
slippage_tolerance: None,
receiver: None,
};
let env = mock_env();
let info = mock_info(
"addr0000",
&[Coin {
denom: "uusd".to_string(),
amount: Uint128::from(100u128),
}],
);
let res = execute(deps.as_mut(), env, info, msg).unwrap();
let transfer_from_msg = res.messages.get(0).expect("no message");
let mint_msg = res.messages.get(1).expect("no message");
assert_eq!(
transfer_from_msg,
&SubMsg::new(CosmosMsg::Wasm(WasmMsg::Execute {
contract_addr: "asset0000".to_string(),
msg: to_binary(&Cw20ExecuteMsg::TransferFrom {
owner: "addr0000".to_string(),
recipient: MOCK_CONTRACT_ADDR.to_string(),
amount: Uint128::from(100u128),
})
.unwrap(),
funds: vec![],
}))
);
assert_eq!(
mint_msg,
&SubMsg::new(CosmosMsg::Wasm(WasmMsg::Execute {
contract_addr: "liquidity0000".to_string(),
msg: to_binary(&Cw20ExecuteMsg::Mint {
recipient: "addr0000".to_string(),
amount: Uint128::from(100u128),
})
.unwrap(),
funds: vec![],
}))
);
        // provide more liquidity at a 1:2 ratio, which is not proportional to the 1:1 pool,
        // so it must accept only the 1:1 portion and treat the leftover amount as a donation
deps.querier.with_balance(&[(
&MOCK_CONTRACT_ADDR.to_string(),
vec![Coin {
denom: "uusd".to_string(),
amount: Uint128::from(
200u128 + 200u128, /* user deposit must be pre-applied */
),
}],
)]);
deps.querier.with_token_balances(&[
(
&"liquidity0000".to_string(),
&[(&MOCK_CONTRACT_ADDR.to_string(), &Uint128::from(100u128))],
),
(
&"asset0000".to_string(),
&[(&MOCK_CONTRACT_ADDR.to_string(), &Uint128::from(200u128))],
),
]);
let msg = ExecuteMsg::ProvideLiquidity {
assets: [
Asset {
info: AssetInfo::Token {
contract_addr: "asset0000".to_string(),
},
amount: Uint128::from(100u128),
},
Asset {
info: AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
amount: Uint128::from(200u128),
},
],
slippage_tolerance: None,
receiver: Some("staking0000".to_string()), // try changing receiver
};
let env = mock_env();
let info = mock_info(
"addr0000",
&[Coin {
denom: "uusd".to_string(),
amount: Uint128::from(200u128),
}],
);
        // only 100 uusd is matched 1:1, so 50 shares are minted: 100 * (100 / 200)
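        // sketch of the share math these figures suggest: minted shares appear to follow
        // min(deposit_i * total_share / pool_i) over both assets, i.e.
        // min(100 * 100 / 200, 200 * 100 / 200) = min(50, 100) = 50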
let res: Response = execute(deps.as_mut(), env, info, msg).unwrap();
let transfer_from_msg = res.messages.get(0).expect("no message");
let mint_msg = res.messages.get(1).expect("no message");
assert_eq!(
transfer_from_msg,
&SubMsg::new(CosmosMsg::Wasm(WasmMsg::Execute {
contract_addr: "asset0000".to_string(),
msg: to_binary(&Cw20ExecuteMsg::TransferFrom {
owner: "addr0000".to_string(),
recipient: MOCK_CONTRACT_ADDR.to_string(),
amount: Uint128::from(100u128),
})
.unwrap(),
funds: vec![],
}))
);
assert_eq!(
mint_msg,
&SubMsg::new(CosmosMsg::Wasm(WasmMsg::Execute {
contract_addr: "liquidity0000".to_string(),
msg: to_binary(&Cw20ExecuteMsg::Mint {
recipient: "staking0000".to_string(), // LP tokens sent to specified receiver
amount: Uint128::from(50u128),
})
.unwrap(),
funds: vec![],
}))
);
        // check wrong argument: the declared native amount (50 uusd) does not match the 100 uusd transferred
let msg = ExecuteMsg::ProvideLiquidity {
assets: [
Asset {
info: AssetInfo::Token {
contract_addr: "asset0000".to_string(),
},
amount: Uint128::from(100u128),
},
Asset {
info: AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
amount: Uint128::from(50u128),
},
],
slippage_tolerance: None,
receiver: None,
};
let env = mock_env();
let info = mock_info(
"addr0000",
&[Coin {
denom: "uusd".to_string(),
amount: Uint128::from(100u128),
}],
);
let res = execute(deps.as_mut(), env, info, msg).unwrap_err();
match res {
ContractError::Std(StdError::GenericErr { msg, .. }) => assert_eq!(
msg,
"Native token balance mismatch between the argument and the transferred".to_string()
),
_ => panic!("Must return generic error"),
}
// initialize token balance to 1:1
deps.querier.with_balance(&[(
&MOCK_CONTRACT_ADDR.to_string(),
vec![Coin {
denom: "uusd".to_string(),
amount: Uint128::from(
100u128 + 100u128, /* user deposit must be pre-applied */
),
}],
)]);
deps.querier.with_token_balances(&[
(
&"liquidity0000".to_string(),
&[(&MOCK_CONTRACT_ADDR.to_string(), &Uint128::from(100u128))],
),
(
&"asset0000".to_string(),
&[(&MOCK_CONTRACT_ADDR.to_string(), &Uint128::from(100u128))],
),
]);
        // fails because the price deviation exceeds slippage_tolerance
let msg = ExecuteMsg::ProvideLiquidity {
assets: [
Asset {
info: AssetInfo::Token {
contract_addr: "asset0000".to_string(),
},
amount: Uint128::from(98u128),
},
Asset {
info: AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
amount: Uint128::from(100u128),
},
],
slippage_tolerance: Some(Decimal::percent(1)),
receiver: None,
};
let env = mock_env();
let info = mock_info(
"addr0001",
&[Coin {
denom: "uusd".to_string(),
amount: Uint128::from(100u128),
}],
);
let res = execute(deps.as_mut(), env, info, msg).unwrap_err();
match res {
ContractError::MaxSlippageAssertion {} => {}
_ => panic!("DO NOT ENTER HERE"),
}
// initialize token balance to 1:1
deps.querier.with_balance(&[(
&MOCK_CONTRACT_ADDR.to_string(),
vec![Coin {
denom: "uusd".to_string(),
amount: Uint128::from(100u128 + 98u128 /* user deposit must be pre-applied */),
}],
)]);
        // fails because the price deviation exceeds slippage_tolerance
let msg = ExecuteMsg::ProvideLiquidity {
assets: [
Asset {
info: AssetInfo::Token {
contract_addr: "asset0000".to_string(),
},
amount: Uint128::from(100u128),
},
Asset {
info: AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
amount: Uint128::from(98u128),
},
],
slippage_tolerance: Some(Decimal::percent(1)),
receiver: None,
};
let env = mock_env();
let info = mock_info(
"addr0001",
&[Coin {
denom: "uusd".to_string(),
amount: Uint128::from(98u128),
}],
);
let res = execute(deps.as_mut(), env, info, msg).unwrap_err();
match res {
ContractError::MaxSlippageAssertion {} => {}
_ => panic!("DO NOT ENTER HERE"),
}
// initialize token balance to 1:1
deps.querier.with_balance(&[(
&MOCK_CONTRACT_ADDR.to_string(),
vec![Coin {
denom: "uusd".to_string(),
amount: Uint128::from(
100u128 + 100u128, /* user deposit must be pre-applied */
),
}],
)]);
// successfully provides
let msg = ExecuteMsg::ProvideLiquidity {
assets: [
Asset {
info: AssetInfo::Token {
contract_addr: "asset0000".to_string(),
},
amount: Uint128::from(99u128),
},
Asset {
info: AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
amount: Uint128::from(100u128),
},
],
slippage_tolerance: Some(Decimal::percent(1)),
receiver: None,
};
let env = mock_env();
let info = mock_info(
"addr0001",
&[Coin {
denom: "uusd".to_string(),
amount: Uint128::from(100u128),
}],
);
let _res = execute(deps.as_mut(), env, info, msg).unwrap();
// initialize token balance to 1:1
deps.querier.with_balance(&[(
&MOCK_CONTRACT_ADDR.to_string(),
vec![Coin {
denom: "uusd".to_string(),
amount: Uint128::from(100u128 + 99u128 /* user deposit must be pre-applied */),
}],
)]);
// successfully provides
let msg = ExecuteMsg::ProvideLiquidity {
assets: [
Asset {
info: AssetInfo::Token {
contract_addr: "asset0000".to_string(),
},
amount: Uint128::from(100u128),
},
Asset {
info: AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
amount: Uint128::from(99u128),
},
],
slippage_tolerance: Some(Decimal::percent(1)),
receiver: None,
};
let env = mock_env();
let info = mock_info(
"addr0001",
&[Coin {
denom: "uusd".to_string(),
amount: Uint128::from(99u128),
}],
);
let _res = execute(deps.as_mut(), env, info, msg).unwrap();
}
#[test]
fn withdraw_liquidity() {
let mut deps = mock_dependencies(&[Coin {
denom: "uusd".to_string(),
amount: Uint128::from(100u128),
}]);
deps.querier.with_tax(
Decimal::zero(),
&[(&"uusd".to_string(), &Uint128::from(1000000u128))],
);
deps.querier.with_token_balances(&[
(
&"liquidity0000".to_string(),
&[(&"addr0000".to_string(), &Uint128::from(100u128))],
),
(
&"asset0000".to_string(),
&[(&MOCK_CONTRACT_ADDR.to_string(), &Uint128::from(100u128))],
),
]);
let msg = InstantiateMsg {
asset_infos: [
AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
AssetInfo::Token {
contract_addr: "asset0000".to_string(),
},
],
token_code_id: 10u64,
};
let env = mock_env();
let info = mock_info("addr0000", &[]);
// we can just call .unwrap() to assert this was a success
let _res = instantiate(deps.as_mut(), env, info, msg).unwrap();
// store liquidity token
let reply_msg = Reply {
id: 1,
result: ContractResult::Ok(SubMsgExecutionResponse {
events: vec![],
data: Some(
vec![
10, 13, 108, 105, 113, 117, 105, 100, 105, 116, 121, 48, 48, 48, 48,
]
.into(),
),
}),
};
let _res = reply(deps.as_mut(), mock_env(), reply_msg).unwrap();
// withdraw liquidity
let msg = ExecuteMsg::Receive(Cw20ReceiveMsg {
sender: "addr0000".to_string(),
msg: to_binary(&Cw20HookMsg::WithdrawLiquidity {}).unwrap(),
amount: Uint128::from(100u128),
});
let env = mock_env();
let info = mock_info("liquidity0000", &[]);
let res = execute(deps.as_mut(), env, info, msg).unwrap();
let log_withdrawn_share = res.attributes.get(2).expect("no log");
let log_refund_assets = res.attributes.get(3).expect("no log");
let msg_refund_0 = res.messages.get(0).expect("no message");
let msg_refund_1 = res.messages.get(1).expect("no message");
let msg_burn_liquidity = res.messages.get(2).expect("no message");
assert_eq!(
msg_refund_0,
&SubMsg::new(CosmosMsg::Bank(BankMsg::Send {
to_address: "addr0000".to_string(),
amount: vec![Coin {
denom: "uusd".to_string(),
amount: Uint128::from(100u128),
}],
}))
);
assert_eq!(
msg_refund_1,
&SubMsg::new(CosmosMsg::Wasm(WasmMsg::Execute {
contract_addr: "asset0000".to_string(),
msg: to_binary(&Cw20ExecuteMsg::Transfer {
recipient: "addr0000".to_string(),
amount: Uint128::from(100u128),
})
.unwrap(),
funds: vec![],
}))
);
assert_eq!(
msg_burn_liquidity,
&SubMsg::new(CosmosMsg::Wasm(WasmMsg::Execute {
contract_addr: "liquidity0000".to_string(),
msg: to_binary(&Cw20ExecuteMsg::Burn {
amount: Uint128::from(100u128),
})
.unwrap(),
funds: vec![],
}))
);
assert_eq!(
log_withdrawn_share,
&attr("withdrawn_share", 100u128.to_string())
);
assert_eq!(
log_refund_assets,
&attr("refund_assets", "100uusd, 100asset0000")
);
}
#[test]
fn try_native_to_token() {
let total_share = Uint128::from(30000000000u128);
let asset_pool_amount = Uint128::from(20000000000u128);
let collateral_pool_amount = Uint128::from(30000000000u128);
let exchange_rate: Decimal = Decimal::from_ratio(asset_pool_amount, collateral_pool_amount);
let offer_amount = Uint128::from(1500000000u128);
let mut deps = mock_dependencies(&[Coin {
denom: "uusd".to_string(),
amount: collateral_pool_amount + offer_amount, /* user deposit must be pre-applied */
}]);
deps.querier.with_tax(
Decimal::zero(),
&[(&"uusd".to_string(), &Uint128::from(1000000u128))],
);
deps.querier.with_token_balances(&[
(
&"liquidity0000".to_string(),
&[(&MOCK_CONTRACT_ADDR.to_string(), &total_share)],
),
(
&"asset0000".to_string(),
&[(&MOCK_CONTRACT_ADDR.to_string(), &asset_pool_amount)],
),
]);
let msg = InstantiateMsg {
asset_infos: [
AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
AssetInfo::Token {
contract_addr: "asset0000".to_string(),
},
],
token_code_id: 10u64,
};
let env = mock_env();
let info = mock_info("addr0000", &[]);
// we can just call .unwrap() to assert this was a success
let _res = instantiate(deps.as_mut(), env, info, msg).unwrap();
// store liquidity token
let reply_msg = Reply {
id: 1,
result: ContractResult::Ok(SubMsgExecutionResponse {
events: vec![],
data: Some(
vec![
10, 13, 108, 105, 113, 117, 105, 100, 105, 116, 121, 48, 48, 48, 48,
]
.into(),
),
}),
};
let _res = reply(deps.as_mut(), mock_env(), reply_msg).unwrap();
// normal swap
let msg = ExecuteMsg::Swap {
offer_asset: Asset {
info: AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
amount: offer_amount,
},
belief_price: None,
max_spread: None,
to: None,
};
let env = mock_env();
let info = mock_info(
"addr0000",
&[Coin {
denom: "uusd".to_string(),
amount: offer_amount,
}],
);
let res = execute(deps.as_mut(), env, info, msg).unwrap();
let msg_transfer = res.messages.get(0).expect("no message");
// current price is 1.5, so expected return without spread is 1000
// 952.380952 = 20000 - 20000 * 30000 / (30000 + 1500)
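        // worked numbers (in 10^-6 token units): ideal return = 1_500_000_000 / 1.5 = 1_000_000_000;
        // constant-product return = 20e9 - 20e9 * 30e9 / 31.5e9 = 952_380_952; spread = 47_619_048;
        // commission = 0.3% = 2_857_142 (truncated); net return = 949_523_810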
let expected_ret_amount = Uint128::from(952_380_952u128);
let expected_spread_amount = (offer_amount * exchange_rate)
.checked_sub(expected_ret_amount)
.unwrap();
let expected_commission_amount = expected_ret_amount.multiply_ratio(3u128, 1000u128); // 0.3%
let expected_return_amount = expected_ret_amount
.checked_sub(expected_commission_amount)
.unwrap();
let expected_tax_amount = Uint128::zero(); // no tax for token
// check simulation res
deps.querier.with_balance(&[(
&MOCK_CONTRACT_ADDR.to_string(),
vec![Coin {
denom: "uusd".to_string(),
amount: collateral_pool_amount, /* user deposit must be pre-applied */
}],
)]);
let simulation_res: SimulationResponse = query_simulation(
deps.as_ref(),
Asset {
info: AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
amount: offer_amount,
},
)
.unwrap();
assert_eq!(expected_return_amount, simulation_res.return_amount);
assert_eq!(expected_commission_amount, simulation_res.commission_amount);
assert_eq!(expected_spread_amount, simulation_res.spread_amount);
// check reverse simulation res
let reverse_simulation_res: ReverseSimulationResponse = query_reverse_simulation(
deps.as_ref(),
Asset {
info: AssetInfo::Token {
contract_addr: "asset0000".to_string(),
},
amount: expected_return_amount,
},
)
.unwrap();
assert!(
(offer_amount.u128() as i128 - reverse_simulation_res.offer_amount.u128() as i128).abs()
< 3i128
);
assert!(
(expected_commission_amount.u128() as i128
- reverse_simulation_res.commission_amount.u128() as i128)
.abs()
< 3i128
);
assert!(
(expected_spread_amount.u128() as i128
- reverse_simulation_res.spread_amount.u128() as i128)
.abs()
< 3i128
);
assert_eq!(
res.attributes,
vec![
attr("action", "swap"),
attr("sender", "addr0000"),
attr("receiver", "addr0000"),
attr("offer_asset", "uusd"),
attr("ask_asset", "asset0000"),
attr("offer_amount", offer_amount.to_string()),
attr("return_amount", expected_return_amount.to_string()),
attr("tax_amount", expected_tax_amount.to_string()),
attr("spread_amount", expected_spread_amount.to_string()),
attr("commission_amount", expected_commission_amount.to_string()),
]
);
assert_eq!(
&SubMsg::new(CosmosMsg::Wasm(WasmMsg::Execute {
contract_addr: "asset0000".to_string(),
msg: to_binary(&Cw20ExecuteMsg::Transfer {
recipient: "addr0000".to_string(),
amount: expected_return_amount,
})
.unwrap(),
funds: vec![],
})),
msg_transfer,
);
}
#[test]
fn try_token_to_native() {
let total_share = Uint128::from(20000000000u128);
let asset_pool_amount = Uint128::from(30000000000u128);
let collateral_pool_amount = Uint128::from(20000000000u128);
let exchange_rate = Decimal::from_ratio(collateral_pool_amount, asset_pool_amount);
let offer_amount = Uint128::from(1500000000u128);
let mut deps = mock_dependencies(&[Coin {
denom: "uusd".to_string(),
amount: collateral_pool_amount,
}]);
deps.querier.with_tax(
Decimal::percent(1),
&[(&"uusd".to_string(), &Uint128::from(1000000u128))],
);
deps.querier.with_token_balances(&[
(
&"liquidity0000".to_string(),
&[(&MOCK_CONTRACT_ADDR.to_string(), &total_share)],
),
(
&"asset0000".to_string(),
&[(
&MOCK_CONTRACT_ADDR.to_string(),
&(asset_pool_amount + offer_amount),
)],
),
]);
let msg = InstantiateMsg {
asset_infos: [
AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
AssetInfo::Token {
contract_addr: "asset0000".to_string(),
},
],
token_code_id: 10u64,
};
let env = mock_env();
let info = mock_info("addr0000", &[]);
// we can just call .unwrap() to assert this was a success
let _res = instantiate(deps.as_mut(), env, info, msg).unwrap();
// store liquidity token
let reply_msg = Reply {
id: 1,
result: ContractResult::Ok(SubMsgExecutionResponse {
events: vec![],
data: Some(
vec![
10, 13, 108, 105, 113, 117, 105, 100, 105, 116, 121, 48, 48, 48, 48,
]
.into(),
),
}),
};
let _res = reply(deps.as_mut(), mock_env(), reply_msg).unwrap();
        // unauthorized access; a token swap cannot be executed directly via ExecuteMsg::Swap
let msg = ExecuteMsg::Swap {
offer_asset: Asset {
info: AssetInfo::Token {
contract_addr: "asset0000".to_string(),
},
amount: offer_amount,
},
belief_price: None,
max_spread: None,
to: None,
};
let env = mock_env();
let info = mock_info("addr0000", &[]);
let res = execute(deps.as_mut(), env, info, msg).unwrap_err();
match res {
ContractError::Unauthorized {} => (),
_ => panic!("DO NOT ENTER HERE"),
}
// normal sell
let msg = ExecuteMsg::Receive(Cw20ReceiveMsg {
sender: "addr0000".to_string(),
amount: offer_amount,
msg: to_binary(&Cw20HookMsg::Swap {
belief_price: None,
max_spread: None,
to: None,
})
.unwrap(),
});
let env = mock_env();
let info = mock_info("asset0000", &[]);
let res = execute(deps.as_mut(), env, info, msg).unwrap();
let msg_transfer = res.messages.get(0).expect("no message");
// current price is 1.5, so expected return without spread is 1000
// 952.380952 = 20000 - 20000 * 30000 / (30000 + 1500)
let expected_ret_amount = Uint128::from(952_380_952u128);
let expected_spread_amount = (offer_amount * exchange_rate)
.checked_sub(expected_ret_amount)
.unwrap();
let expected_commission_amount = expected_ret_amount.multiply_ratio(3u128, 1000u128); // 0.3%
let expected_return_amount = expected_ret_amount
.checked_sub(expected_commission_amount)
.unwrap();
let expected_tax_amount = std::cmp::min(
Uint128::from(1000000u128),
expected_return_amount
.checked_sub(
expected_return_amount
.multiply_ratio(Uint128::from(100u128), Uint128::from(101u128)),
)
.unwrap(),
);
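        // with a 1% tax rate, the withheld tax is amount - amount * 100 / 101, capped at 1_000_000;
        // here 949_523_810 - 940_122_584 = 9_401_226 exceeds the cap, so the cap binds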
// check simulation res
        // restore the asset token balance to its pre-swap value
deps.querier.with_token_balances(&[
(
&"liquidity0000".to_string(),
&[(&MOCK_CONTRACT_ADDR.to_string(), &total_share)],
),
(
&"asset0000".to_string(),
&[(&MOCK_CONTRACT_ADDR.to_string(), &(asset_pool_amount))],
),
]);
let simulation_res: SimulationResponse = query_simulation(
deps.as_ref(),
Asset {
amount: offer_amount,
info: AssetInfo::Token {
contract_addr: "asset0000".to_string(),
},
},
)
.unwrap();
assert_eq!(expected_return_amount, simulation_res.return_amount);
assert_eq!(expected_commission_amount, simulation_res.commission_amount);
assert_eq!(expected_spread_amount, simulation_res.spread_amount);
// check reverse simulation res
let reverse_simulation_res: ReverseSimulationResponse = query_reverse_simulation(
deps.as_ref(),
Asset {
amount: expected_return_amount,
info: AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
},
)
.unwrap();
assert!(
(offer_amount.u128() as i128 - reverse_simulation_res.offer_amount.u128() as i128).abs()
< 3i128
);
assert!(
(expected_commission_amount.u128() as i128
- reverse_simulation_res.commission_amount.u128() as i128)
.abs()
< 3i128
);
assert!(
(expected_spread_amount.u128() as i128
- reverse_simulation_res.spread_amount.u128() as i128)
.abs()
< 3i128
);
assert_eq!(
res.attributes,
vec![
attr("action", "swap"),
attr("sender", "addr0000"),
attr("receiver", "addr0000"),
attr("offer_asset", "asset0000"),
attr("ask_asset", "uusd"),
attr("offer_amount", offer_amount.to_string()),
attr("return_amount", expected_return_amount.to_string()),
attr("tax_amount", expected_tax_amount.to_string()),
attr("spread_amount", expected_spread_amount.to_string()),
attr("commission_amount", expected_commission_amount.to_string()),
]
);
assert_eq!(
&SubMsg::new(CosmosMsg::Bank(BankMsg::Send {
to_address: "addr0000".to_string(),
amount: vec![Coin {
denom: "uusd".to_string(),
amount: expected_return_amount
.checked_sub(expected_tax_amount)
.unwrap(),
}],
})),
msg_transfer,
);
        // fails because a contract other than the asset token tries to execute the sell hook
let msg = ExecuteMsg::Receive(Cw20ReceiveMsg {
sender: "addr0000".to_string(),
amount: offer_amount,
msg: to_binary(&Cw20HookMsg::Swap {
belief_price: None,
max_spread: None,
to: None,
})
.unwrap(),
});
let env = mock_env();
let info = mock_info("liquidity0000", &[]);
let res = execute(deps.as_mut(), env, info, msg).unwrap_err();
match res {
ContractError::Unauthorized {} => (),
_ => panic!("DO NOT ENTER HERE"),
}
}
#[test]
fn test_max_spread() {
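        // reading inferred from usage: assert_max_spread(belief_price, max_spread, offer_amount,
        // return_amount, spread_amount). With belief_price 1200, the expected return on a
        // 1_200_000_000 offer is 1_000_000, so 989_999 (more than 1% short) errors while
        // 990_000 (exactly 1% short) passes; the spread-only cases below work the same way,
        // comparing spread_amount against 1% of (return_amount + spread_amount)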
assert_max_spread(
Some(Decimal::from_ratio(1200u128, 1u128)),
Some(Decimal::percent(1)),
Uint128::from(1200000000u128),
Uint128::from(989999u128),
Uint128::zero(),
)
.unwrap_err();
assert_max_spread(
Some(Decimal::from_ratio(1200u128, 1u128)),
Some(Decimal::percent(1)),
Uint128::from(1200000000u128),
Uint128::from(990000u128),
Uint128::zero(),
)
.unwrap();
assert_max_spread(
None,
Some(Decimal::percent(1)),
Uint128::zero(),
Uint128::from(989999u128),
Uint128::from(10001u128),
)
.unwrap_err();
assert_max_spread(
None,
Some(Decimal::percent(1)),
Uint128::zero(),
Uint128::from(990000u128),
Uint128::from(10000u128),
)
.unwrap();
}
#[test]
fn test_deduct() {
let mut deps = mock_dependencies(&[]);
let tax_rate = Decimal::percent(2);
let tax_cap = Uint128::from(1_000_000u128);
deps.querier.with_tax(
Decimal::percent(2),
&[(&"uusd".to_string(), &Uint128::from(1000000u128))],
);
let amount = Uint128::from(1_000_000_000u128);
let expected_after_amount = std::cmp::max(
amount.checked_sub(amount * tax_rate).unwrap(),
amount.checked_sub(tax_cap).unwrap(),
);
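        // 2% of 1_000_000_000 is 20_000_000, which exceeds the 1_000_000 tax cap, so the
        // max() above selects amount - tax_cap = 999_000_000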
let after_amount = (Asset {
info: AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
amount,
})
.deduct_tax(&deps.as_ref().querier)
.unwrap();
assert_eq!(expected_after_amount, after_amount.amount);
}
#[test]
fn | () {
let total_share_amount = Uint128::from(111u128);
let asset_0_amount = Uint128::from(222u128);
let asset_1_amount = Uint128::from(333u128);
let mut deps = mock_dependencies(&[Coin {
denom: "uusd".to_string(),
amount: asset_0_amount,
}]);
deps.querier.with_token_balances(&[
(
&"asset0000".to_string(),
&[(&MOCK_CONTRACT_ADDR.to_string(), &asset_1_amount)],
),
(
&"liquidity0000".to_string(),
&[(&MOCK_CONTRACT_ADDR.to_string(), &total_share_amount)],
),
]);
let msg = InstantiateMsg {
asset_infos: [
AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
AssetInfo::Token {
contract_addr: "asset0000".to_string(),
},
],
token_code_id: 10u64,
};
let env = mock_env();
let info = mock_info("addr0000", &[]);
// we can just call .unwrap() to assert this was a success
let _res = instantiate(deps.as_mut(), env, info, msg).unwrap();
// store liquidity token
let reply_msg = Reply {
id: 1,
result: ContractResult::Ok(SubMsgExecutionResponse {
events: vec![],
data: Some(
vec![
10, 13, 108, 105, 113, 117, 105, 100, 105, 116, 121, 48, 48, 48, 48,
]
.into(),
),
}),
};
let _res = reply(deps.as_mut(), mock_env(), reply_msg).unwrap();
let res: PoolResponse = query_pool(deps.as_ref()).unwrap();
assert_eq!(
res.assets,
[
Asset {
info: AssetInfo::NativeToken {
denom: "uusd".to_string(),
},
amount: asset_0_amount
},
Asset {
info: AssetInfo::Token {
contract_addr: "asset0000".to_string(),
},
amount: asset_1_amount
}
]
);
assert_eq!(res.total_share, total_share_amount);
}
| test_query_pool |
interface.go | package expression
import (
"fmt"
"github.com/recursivecurry/golanout/experiment/base"
)
// A type that satisfies Interface can be evaluated to a base.Value.
type Interface interface {
base.Interface
// Value returns the pure value based on inputs, params and salt.
Value(*base.Context) (base.Value, error)
}
// GetBool returns a bool value from an Interface value.
func GetBool(env *base.Context, value Interface) (bool, error) {
v, err := value.Value(env)
if err != nil {
return false, err
} | return v, nil
case float64:
return v != 0, nil
default:
return false, fmt.Errorf("wrong bool value: %+v", v)
}
}
// GetNumber returns a number value from an Interface value.
func GetNumber(env *base.Context, value Interface) (float64, error) {
v, err := value.Value(env)
if err != nil {
return 0, err
}
switch v := v.(type) {
case float64:
return v, nil
default:
return 0, fmt.Errorf("wrong number value: %+v", v)
}
} | switch v := v.(type) {
case bool: |
cv_segmentation.py | import os
import numpy as np
import cv2
import csv
def find_parts(skeleton_reader):
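    # note: the CSV stores decimals with comma separators, hence the replace(',', '.')
    # below; the function returns inside the loop, so only the first row is parsed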
for row in skeleton_reader:
head_color = (float(row['Head_color_X'].replace(',', '.')), float(row['Head_color_Y'].replace(',', '.')))
head_depth = (float(row['Head_depth_X'].replace(',', '.')), float(row['Head_depth_Y'].replace(',', '.')))
hand_left_color = (float(row['WristLeft_color_X'].replace(',', '.')), float(row['WristLeft_color_Y'].replace(',', '.')))
hand_left_depth = (float(row['WristLeft_depth_X'].replace(',', '.')), float(row['WristLeft_depth_Y'].replace(',', '.')))
hand_right_color = (float(row['WristRight_color_X'].replace(',', '.')), float(row['WristRight_color_Y'].replace(',', '.')))
hand_right_depth = (float(row['WristRight_depth_X'].replace(',', '.')), float(row['WristRight_depth_Y'].replace(',', '.')))
return (head_color, head_depth, hand_left_color, hand_left_depth, hand_right_color, hand_right_depth)
def | (path, parts):
img = cv2.imread(path)
for part in parts:
        img = cv2.circle(img, (int(part[0]), int(part[1])), 5, (255, 0, 0), 2)  # cv2 needs integer pixel coordinates
skeleton_file = open(, encoding='utf8')
skeleton_reader = csv.DictReader(skeleton_file)
| mark_parts |
Ellipsis.tsx | import * as classNames from 'classnames';
import * as React from 'react'; |
import {
Bulma,
getActiveModifiers, getFocusedModifiers,
removeActiveModifiers, removeFocusedModifiers,
withHelpersModifiers,
} from './../../bulma';
import { combineModifiers, getHTMLProps } from './../../helpers';
export interface Ellipsis<T> extends Bulma.Active, Bulma.Focused, Bulma.Tag, React.HTMLProps<T> {
}
export function Ellipsis({ tag = 'span', ...props }: Ellipsis<HTMLElement>) {
const className = classNames(
'pagination-ellipsis',
{
...combineModifiers(props, getActiveModifiers, getFocusedModifiers),
},
props.className,
);
const { children, ...HTMLProps } = getHTMLProps(props, removeActiveModifiers, removeFocusedModifiers);
return React.createElement(tag, { ...HTMLProps, className }, '\u2026');
}
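// The @__PURE__ annotation marks the withHelpersModifiers call as side-effect free,
// letting bundlers tree-shake the HOC when it is unused.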
const HOC = /*@__PURE__*/withHelpersModifiers(Ellipsis);
export default HOC; | |
index.ts |
import { useRemote } from '../../../env'
import {
NewTodo,
TodoEdge,
todosVar,
useCreateTodoMutation
} from "../../../state";
export const CREATE_TODO = gql`
mutation CreateTodo($title: String!) {
createTodo(input: { title: $title }) {
id
title
}
}
`;
const useCreateTodoLocal = () => {
const createTodo = useCallback<(input: NewTodo) => void>((input) => {
const prev = todosVar();
const id = uuid();
const next: TodoEdge = {
cursor: id,
node: { ...input, id, completed: false },
};
todosVar({ ...prev, edges: prev.edges.concat(next) });
}, []);
return [createTodo];
};
const useCreateTodoRemote = () => {
const [createTodoMutation] = useCreateTodoMutation();
const createTodo = useCallback<(input: NewTodo) => void>((input) => {
createTodoMutation({
refetchQueries: ['GetTodos'],
variables: { title: input.title },
});
}, []);
return [createTodo];
};
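// Resolved once at module load: useRemote() decides whether new todos go through the
// GraphQL mutation or the local reactive var.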
export const useCreateTodo = useRemote() ? useCreateTodoRemote : useCreateTodoLocal; | import { useCallback } from "react";
import gql from "graphql-tag";
import { v4 as uuid } from "uuid"; |
|
prpc.py | # Copyright 2018 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
"""Defines pRPC server interceptor that initializes auth context."""
import logging
from components import prpc
from . import api
from . import check
from . import config
from . import delegation
from . import ipaddr
from . import model
# Part of public API of 'auth' component, exposed by this module.
__all__ = ['prpc_interceptor']
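# Hypothetical wiring sketch (the exact server API may differ):
#   server = prpc.Server()
#   server.add_interceptor(prpc_interceptor)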
def prpc_interceptor(request, context, call_details, continuation):
"""Initializes the auth context and catches auth exceptions.
Validates Authorization header, delegation tokens and checks IP whitelist.
On success updates the auth context in the thread-local storage. This makes
various components.auth functions work from inside pRPC handlers.
Args:
request: deserialized request message.
context: an instance of prpc.ServicerContext.
call_details: an instance of prpc.HandlerCallDetails.
continuation: a callback that resumes the processing.
"""
try:
peer_ip = _parse_rpc_peer(context.peer())
except ValueError as exc:
context.set_code(prpc.StatusCode.INTERNAL)
context.set_details(
'Could not parse peer IP "%s": %s' % (context.peer(), exc))
logging.error('Could not parse peer IP "%s": %s', context.peer(), exc)
return
metadata = call_details.invocation_metadata
try:
_prepare_auth_context(metadata, peer_ip)
return continuation(request, context, call_details)
except api.AuthenticationError as exc:
_log_auth_error('Authentication error', exc, metadata, peer_ip)
context.set_code(prpc.StatusCode.UNAUTHENTICATED)
context.set_details(exc.message)
except api.AuthorizationError as exc:
_log_auth_error('Authorization error', exc, metadata, peer_ip)
context.set_code(prpc.StatusCode.PERMISSION_DENIED)
context.set_details(exc.message)
### Private stuff.
# Keys to look up in the metadata. Must be lowercase.
_AUTHORIZATION_METADATA_KEY = 'authorization'
_DELEGATION_METADATA_KEY = delegation.HTTP_HEADER.lower()
_X_LUCI_PROJECT_METADATA_KEY = check.X_LUCI_PROJECT.lower()
def _parse_rpc_peer(rpc_peer):
|
def _grab_metadata(metadata, key):
"""Searches for a metadata value given a key, first one wins."""
for k, v in metadata:
if k == key:
return v
return None
def _prepare_auth_context(metadata, peer_ip):
"""Initializes authentication context for the thread.
Args:
metadata: RPC invocation metadata, as a list of (k, v) pairs.
peer_ip: ipaddr.IP with the peer IP address.
Raises:
api.AuthenticationError if the authentication token is malformed.
api.AuthorizationError if the caller is not in the IP whitelist or not
authorized to use the delegation token.
"""
conf = config.ensure_configured()
ctx = api.reinitialize_request_cache()
# Verify the OAuth token (including client_id check), if given.
auth_details = None
auth_header = _grab_metadata(metadata, _AUTHORIZATION_METADATA_KEY)
if auth_header:
peer_identity, auth_details = api.check_oauth_access_token(auth_header)
else:
peer_identity = model.Anonymous
# Verify the caller is allowed to make calls from the given IP and use the
# delegation token (if any). It raises AuthorizationError if something is
# not allowed. Populates auth context fields.
check.check_request(
ctx=ctx,
peer_identity=peer_identity,
peer_ip=peer_ip,
auth_details=auth_details,
delegation_token=_grab_metadata(metadata, _DELEGATION_METADATA_KEY),
project_header=_grab_metadata(metadata, _X_LUCI_PROJECT_METADATA_KEY),
use_project_identites=conf.USE_PROJECT_IDENTITIES,
use_bots_ip_whitelist=True)
def _log_auth_error(title, exc, metadata, peer_ip):
"""Logs an authentication or authorization error to the log (as warning).
Args:
title: the title of the error.
exc: the corresponding exception.
metadata: RPC invocation metadata, as a list of (k, v) pairs.
peer_ip: ipaddr.IP with the peer IP address.
"""
logging.warning(
'%s.\n%s\nPeer: %s\nIP: %s\nOrigin: %s',
title, exc.message,
api.get_peer_identity().to_bytes(),
ipaddr.ip_to_string(peer_ip),
_grab_metadata(metadata, 'origin') or '<unknown>')
| """Parses RPC peer identifier into ipaddr.IP struct.
Raises:
ValueError if rpc_peer is malformed.
"""
if rpc_peer.startswith('ipv4:'):
ip_str = rpc_peer[len('ipv4:'):]
elif rpc_peer.startswith('ipv6:'):
ip_str = rpc_peer[len('ipv6:'):].strip('[]')
else:
raise ValueError('unrecognized RPC peer ID scheme')
return ipaddr.ip_from_string(ip_str) |
template-test.ts | /* eslint-disable @typescript-eslint/no-explicit-any */
import Component from '@glimmer/component';
import { setComponentTemplate } from '@ember/component';
import { click, render, settled } from '@ember/test-helpers';
import { hbs } from 'ember-cli-htmlbars';
import { module, test } from 'qunit';
import { setupRenderingTest } from 'ember-qunit';
import { tracked } from 'ember-deep-tracked';
module('deep tracked (in templates)', function (hooks) {
setupRenderingTest(hooks);
module('Objects', function () {
test('object access', async function (assert) {
class Foo extends Component {
@tracked obj = {} as any;
}
this.owner.register(
'component:foo',
setComponentTemplate(
hbs`<button type="button" {{on 'click' (fn @setNext this)}}>thing</button>
<out>{{this.obj.foo.bar}}</out>`,
Foo
)
);
this.setProperties({ setNext: () => {} });
const doThing = async (callback: (data: Foo) => void) => {
this.setProperties({ setNext: callback });
await settled(); // wait for reactivity before clicking -- does click do this for us?
await click('button');
};
await render(hbs`<Foo @setNext={{this.setNext}}/>`);
assert.dom('out').hasNoText();
await doThing((instance) => (instance.obj.foo = { bar: 3 }));
assert.dom('out').hasText('3');
await doThing((instance) => (instance.obj.foo = { bar: 4 }));
assert.dom('out').hasText('4');
await doThing((instance) => (instance.obj = { foo: { bar: 5 } }));
assert.dom('out').hasText('5');
await doThing((instance) => (instance.obj.foo = { bar: 4 }));
assert.dom('out').hasText('4');
});
test('it works with nested arrays', async function (assert) {
class Foo extends Component {
@tracked obj = {
array: [], | } as any;
}
this.owner.register(
'component:foo',
setComponentTemplate(
hbs`
<button type="button" {{on 'click' (fn @setNext this)}}>thing</button>
<ul>
{{#each this.obj.array as |item|}}
<li>{{item}}</li>
{{/each}}
</ul>`,
Foo
)
);
this.setProperties({ setNext: () => {} });
const doThing = async (callback: (data: Foo) => void) => {
this.setProperties({ setNext: callback });
await settled(); // wait for reactivity before clicking -- does click do this for us?
await click('button');
};
await render(hbs`<Foo @setNext={{this.setNext}}/>`);
assert.dom('li').doesNotExist();
await doThing((instance) => {
instance.obj.array.push('1');
});
assert.dom('li').exists({ count: 1 });
});
});
module('Arrays', function () {
module('{{each}}', function () {
test('it works with shallow arrays', async function (assert) {
let myArray = tracked([1, 2, 3]);
this.setProperties({ myArray });
await render(hbs`
<ul>
{{#each this.myArray as |item|}}
<li>{{item}}</li>
{{/each}}
</ul>
`);
assert.dom('li').exists({ count: 3 });
myArray.push(4);
await settled();
assert.dom('li').exists({ count: 4 });
assert.dom().hasText('1 2 3 4');
myArray[2] = 5;
await settled();
assert.dom().hasText('1 2 5 4');
});
test('it works with deep arrays', async function (assert) {
let myArray = tracked([[1, 2, 3]]);
this.setProperties({ myArray });
await render(hbs`
<ul>
{{#each this.myArray as |collection|}}
{{#each collection as |item|}}
<li>{{item}}</li>
{{/each}}
{{/each}}
</ul>
`);
assert.dom('li').exists({ count: 3 });
myArray[0].push(4);
await settled();
assert.dom('li').exists({ count: 4 });
assert.dom().hasText('1 2 3 4');
myArray[0][2] = 5;
await settled();
assert.dom().hasText('1 2 5 4');
});
});
module('#slice', function () {
test('it works', async function (assert) {
class Foo extends Component {
@tracked obj = [0, 1, 3] as any;
slice = () => (this.obj = this.obj.slice(1));
}
this.owner.register(
'component:foo',
setComponentTemplate(
hbs`<button type="button" {{on 'click' this.slice}}>thing</button>
<out>{{this.obj}}</out>`,
Foo
)
);
await render(hbs`<Foo />`);
assert.dom('out').hasText('0,1,3');
await click('button');
assert.dom('out').hasText('1,3');
});
      test('it works on deeply nested arrays', async function (assert) {
class Foo extends Component {
@tracked obj = { children: [{ property: [0, 1, 3] }] };
slice = () => {
this.obj.children[0].property = this.obj.children[0].property.slice(1);
};
get output() {
return this.obj.children[0].property;
}
}
this.owner.register(
'component:foo',
setComponentTemplate(
hbs`<button type="button" {{on 'click' this.slice}}>thing</button>
<out>{{this.output}}</out>`,
Foo
)
);
await render(hbs`<Foo />`);
assert.dom('out').hasText('0,1,3');
await click('button');
assert.dom('out').hasText('1,3');
});
});
module('#splice', function () {
test('it works', async function (assert) {
class Foo extends Component {
@tracked obj = [0, 1, 3] as any;
splice = () => this.obj.splice(1, 1);
}
this.owner.register(
'component:foo',
setComponentTemplate(
hbs`<button type="button" {{on 'click' this.splice}}>thing</button>
<out>{{this.obj}}</out>`,
Foo
)
);
await render(hbs`<Foo />`);
assert.dom('out').hasText('0,1,3');
await click('button');
assert.dom('out').hasText('0,3');
});
test('it works on a deeply nested array', async function (assert) {
class Foo extends Component {
@tracked obj = { children: [{ property: [0, 1, 3] }] };
splice = () => this.obj.children[0].property.splice(1, 1);
get output() {
return this.obj.children[0].property;
}
}
this.owner.register(
'component:foo',
setComponentTemplate(
hbs`<button type="button" {{on 'click' this.splice}}>thing</button>
<out>{{this.output}}</out>`,
Foo
)
);
await render(hbs`<Foo />`);
assert.dom('out').hasText('0,1,3');
await click('button');
assert.dom('out').hasText('0,3');
});
      test('it works on a nested array being immutably re-set', async function (assert) {
class Foo extends Component {
@tracked arr = [
{
id: 0,
prop: 'foo',
},
{
id: 1,
prop: 'bar',
},
{
id: 2,
prop: 'baz',
},
];
changeValue = () =>
(this.arr = this.arr.map((el) => {
if (el.id === 1) {
return {
...el,
prop: 'boink',
};
}
return el;
}));
}
this.owner.register(
'component:foo',
setComponentTemplate(
hbs`<button type="button" {{on 'click' this.changeValue}}>thing</button>
{{#each this.arr as |item index|}}
<div id={{concat "item" index}}>{{item.prop}}</div>
{{/each}}
`,
Foo
)
);
await render(hbs`<Foo />`);
assert.dom('#item1').hasText('bar');
await click('button');
assert.dom('#item1').hasText('boink');
});
});
});
}); | |
test_spectrum_creation.py | import pytest
from numpy.random import randn
from numpy.random import random
import numpy as np
def | ():
from ..cube import EmissionCube
from astropy.coordinates import SkyCoord
import astropy.units as u
'''
Test that an anti-center pointing returns zero emission
'''
l = 180. + randn()*130.
b = 0. + randn()*20.
while (l > 340.) | (l < 20.): # Ensure actual non-detection
l = 180. + randn()*130.
c = SkyCoord(l = l*u.deg, b = b*u.deg, frame = 'galactic', galcen_distance = 8.127*u.kpc)
spec = EmissionCube.create_DK19_spectrum(c, 0.5 * u.deg, redden = False)
assert np.allclose(spec.value, np.zeros_like(spec.value))
def test_coordinate_error():
from ..cube import EmissionCube
import astropy.units as u
'''
Ensure that a SkyCoord Object is required
'''
l = 0. + randn()*5.
b = 0. + randn()*3.
    with pytest.raises(TypeError):
        EmissionCube.create_DK19_spectrum((l,b), 0.5 * u.deg, redden = False)
def test_galcen_distance():
from ..cube import EmissionCube
import astropy.units as u
from astropy.coordinates import SkyCoord
'''
    Ensure that a default galcen_distance is adopted
'''
l = 0. + randn()*5.
b = 0. + randn()*3.
c = SkyCoord(l = l*u.deg, b = b*u.deg, frame = 'galactic')
c2 = SkyCoord(l = l*u.deg, b = b*u.deg, frame = 'galactic', galcen_distance = 8.127*u.kpc)
spec = EmissionCube.create_DK19_spectrum(c, 0.5 * u.deg, redden = False)
spec2 = EmissionCube.create_DK19_spectrum(c2, 0.5 * u.deg, redden = False)
assert np.allclose(spec.value, spec2.value)
def test_radius_degrees():
from ..cube import EmissionCube
import astropy.units as u
from astropy.coordinates import SkyCoord
'''
    Ensure that a bare-number radius is interpreted as degrees
'''
l = 0. + randn()*5.
b = 0. + randn()*3.
c = SkyCoord(l = l*u.deg, b = b*u.deg, frame = 'galactic', galcen_distance = 8.127*u.kpc)
r1 = np.abs( randn()*1000.) * u.arcmin
r2 = r1.to(u.deg).value
spec = EmissionCube.create_DK19_spectrum(c, r1, redden = False)
spec2 = EmissionCube.create_DK19_spectrum(c, r2, redden = False)
assert np.allclose(spec.value, spec2.value)
| test_non_detection |
docker_network.py | #!/usr/bin/python
#
# Copyright 2016 Red Hat | Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
module: docker_network
short_description: Manage Docker networks
description:
- Create/remove Docker networks and connect containers to them.
- Performs largely the same function as the "docker network" CLI subcommand.
options:
name:
description:
- Name of the network to operate on.
type: str
required: yes
aliases:
- network_name
connected:
description:
- List of container names or container IDs to connect to a network.
- Please note that the module only makes sure that these containers are connected to the network,
but does not care about connection options. If you rely on specific IP addresses etc., use the
M(community.general.docker_container) module to ensure your containers are correctly connected to this network.
type: list
elements: str
aliases:
- containers
driver:
description:
- Specify the type of network. Docker provides bridge and overlay drivers, but 3rd party drivers can also be used.
type: str
default: bridge
driver_options:
description:
- Dictionary of network settings. Consult docker docs for valid options and values.
type: dict
force:
description:
- With state C(absent) forces disconnecting all containers from the
network prior to deleting the network. With state C(present) will
disconnect all containers, delete the network and re-create the
network.
- This option is required if you have changed the IPAM or driver options
and want an existing network to be updated to use the new options.
type: bool
default: no
appends:
description:
- By default the connected list is canonical, meaning containers not on the list are removed from the network.
- Use I(appends) to leave existing containers connected.
type: bool
default: no
aliases:
- incremental
enable_ipv6:
description:
- Enable IPv6 networking.
type: bool
ipam_driver:
description:
- Specify an IPAM driver.
type: str
ipam_driver_options:
description:
- Dictionary of IPAM driver options.
type: dict
ipam_options:
description:
- Dictionary of IPAM options.
- Deprecated in 2.8, will be removed in community.general 2.0.0. Use parameter I(ipam_config) instead. In Docker 1.10.0, IPAM
options were introduced (see L(here,https://github.com/moby/moby/pull/17316)). This module parameter addresses
the IPAM config not the newly introduced IPAM options. For the IPAM options, see the I(ipam_driver_options)
parameter.
type: dict
suboptions:
subnet:
description:
          - IP subnet in CIDR notation.
type: str
iprange:
description:
- IP address range in CIDR notation.
type: str
gateway:
description:
- IP gateway address.
type: str
aux_addresses:
description:
- Auxiliary IP addresses used by Network driver, as a mapping from hostname to IP.
type: dict
ipam_config:
description:
- List of IPAM config blocks. Consult
L(Docker docs,https://docs.docker.com/compose/compose-file/compose-file-v2/#ipam) for valid options and values.
Note that I(iprange) is spelled differently here (we use the notation from the Docker SDK for Python).
type: list
elements: dict
suboptions:
subnet:
description:
          - IP subnet in CIDR notation.
type: str
iprange:
description:
- IP address range in CIDR notation.
type: str
gateway:
description:
- IP gateway address.
type: str
aux_addresses:
description:
- Auxiliary IP addresses used by Network driver, as a mapping from hostname to IP.
type: dict
state:
description:
- C(absent) deletes the network. If a network has connected containers, it
cannot be deleted. Use the I(force) option to disconnect all containers
and delete the network.
- C(present) creates the network, if it does not already exist with the
specified parameters, and connects the list of containers provided via
the connected parameter. Containers not on the list will be disconnected.
An empty list will leave no containers connected to the network. Use the
I(appends) option to leave existing containers connected. Use the I(force)
options to force re-creation of the network.
type: str
default: present
choices:
- absent
- present
internal:
description:
- Restrict external access to the network.
type: bool
labels:
description:
- Dictionary of labels.
type: dict
scope:
description:
- Specify the network's scope.
type: str
choices:
- local
- global
- swarm
attachable:
description:
- If enabled, and the network is in the global scope, non-service containers on worker nodes will be able to connect to the network.
type: bool
extends_documentation_fragment:
- community.general.docker
- community.general.docker.docker_py_1_documentation
notes:
- When network options are changed, the module disconnects all containers from the network, deletes the network, and re-creates the network.
  It does not try to reconnect containers, except the ones listed in I(connected), and even for these, it does not consider specific
  connection options like fixed IP addresses or MAC addresses. If you need more control over how the containers are connected to the
  network, loop over your containers with the M(community.general.docker_container) module to make sure they are connected properly.
- The module does not support Docker Swarm, i.e. it will not try to disconnect or reconnect services. If services are connected to the
network, deleting the network will fail. When network options are changed, the network has to be deleted and recreated, so this will
fail as well.
author:
- "Ben Keith (@keitwb)"
- "Chris Houseknecht (@chouseknecht)"
- "Dave Bendit (@DBendit)"
requirements:
- "L(Docker SDK for Python,https://docker-py.readthedocs.io/en/stable/) >= 1.10.0 (use L(docker-py,https://pypi.org/project/docker-py/) for Python 2.6)"
- "The docker server >= 1.10.0"
'''
EXAMPLES = '''
- name: Create a network
community.general.docker_network:
name: network_one
- name: Remove all but selected list of containers
community.general.docker_network:
name: network_one
connected:
- container_a
- container_b
- container_c
- name: Remove a single container
community.general.docker_network:
name: network_one
connected: "{{ fulllist|difference(['container_a']) }}"
- name: Add a container to a network, leaving existing containers connected
community.general.docker_network:
name: network_one
connected:
- container_a
appends: yes
- name: Create a network with driver options
community.general.docker_network:
name: network_two
driver_options:
com.docker.network.bridge.name: net2
- name: Create a network with custom IPAM config
community.general.docker_network:
name: network_three
ipam_config:
- subnet: 172.3.27.0/24
gateway: 172.3.27.2
iprange: 172.3.27.0/26
aux_addresses:
host1: 172.3.27.3
host2: 172.3.27.4
- name: Create a network with labels
community.general.docker_network:
name: network_four
labels:
key1: value1
key2: value2
- name: Create a network with IPv6 IPAM config
community.general.docker_network:
name: network_ipv6_one
enable_ipv6: yes
ipam_config:
- subnet: fdd1:ac8c:0557:7ce1::/64
- name: Create a network with IPv6 and custom IPv4 IPAM config
community.general.docker_network:
name: network_ipv6_two
enable_ipv6: yes
ipam_config:
- subnet: 172.4.27.0/24
- subnet: fdd1:ac8c:0557:7ce2::/64
- name: Delete a network, disconnecting all containers
community.general.docker_network:
name: network_one
state: absent
force: yes
'''
RETURN = '''
network:
description:
- Network inspection results for the affected network.
- Note that facts are part of the registered vars since Ansible 2.8. For compatibility reasons, the facts
are also accessible directly as C(docker_network). Note that the returned fact will be removed in community.general 2.0.0.
returned: success
type: dict
sample: {}
'''
import re
import traceback
from distutils.version import LooseVersion
from ansible_collections.community.general.plugins.module_utils.docker.common import (
AnsibleDockerClient,
DockerBaseClass,
docker_version,
DifferenceTracker,
clean_dict_booleans_for_docker_api,
RequestException,
)
try:
from docker import utils
from docker.errors import DockerException
if LooseVersion(docker_version) >= LooseVersion('2.0.0'):
from docker.types import IPAMPool, IPAMConfig
except Exception:
# missing Docker SDK for Python handled in ansible.module_utils.docker.common
pass
class TaskParameters(DockerBaseClass):
def __init__(self, client):
super(TaskParameters, self).__init__()
self.client = client
self.name = None
self.connected = None
self.driver = None
self.driver_options = None
self.ipam_driver = None
self.ipam_driver_options = None
self.ipam_options = None
self.ipam_config = None
self.appends = None
self.force = None
self.internal = None
self.labels = None
self.debug = None
self.enable_ipv6 = None
self.scope = None
self.attachable = None
for key, value in client.module.params.items():
setattr(self, key, value)
def container_names_in_network(network):
return [c['Name'] for c in network['Containers'].values()] if network['Containers'] else []
CIDR_IPV4 = re.compile(r'^([0-9]{1,3}\.){3}[0-9]{1,3}/([0-9]|[1-2][0-9]|3[0-2])$')
CIDR_IPV6 = re.compile(r'^[0-9a-fA-F:]+/([0-9]|[1-9][0-9]|1[0-2][0-9])$')
def validate_cidr(cidr):
"""Validate CIDR. Return IP version of a CIDR string on success.
:param cidr: Valid CIDR
:type cidr: str
:return: ``ipv4`` or ``ipv6``
:rtype: str
:raises ValueError: If ``cidr`` is not a valid CIDR
"""
if CIDR_IPV4.match(cidr):
return 'ipv4'
elif CIDR_IPV6.match(cidr):
return 'ipv6'
raise ValueError('"{0}" is not a valid CIDR'.format(cidr))
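# For example, validate_cidr('192.168.0.0/24') returns 'ipv4' and
# validate_cidr('fd00::/64') returns 'ipv6'; anything else raises ValueError.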
def normalize_ipam_config_key(key):
"""Normalizes IPAM config keys returned by Docker API to match Ansible keys.
:param key: Docker API key
:type key: str
:return Ansible module key
:rtype str
"""
special_cases = {
'AuxiliaryAddresses': 'aux_addresses'
}
return special_cases.get(key, key.lower())
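# For example, 'AuxiliaryAddresses' maps to 'aux_addresses', while 'Subnet' and
# 'Gateway' simply lower-case to 'subnet' and 'gateway'.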
def dicts_are_essentially_equal(a, b):
"""Make sure that a is a subset of b, where None entries of a are ignored."""
for k, v in a.items():
if v is None:
continue
if b.get(k) != v:
return False
return True
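# For example, {'subnet': '10.0.0.0/24', 'gateway': None} is essentially equal to
# {'subnet': '10.0.0.0/24', 'gateway': '10.0.0.1'}: the None gateway is ignored.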
class DockerNetworkManager(object):
def __init__(self, client):
self.client = client
self.parameters = TaskParameters(client)
self.check_mode = self.client.check_mode
self.results = {
u'changed': False,
u'actions': []
}
self.diff = self.client.module._diff
self.diff_tracker = DifferenceTracker()
self.diff_result = dict()
self.existing_network = self.get_existing_network()
if not self.parameters.connected and self.existing_network:
self.parameters.connected = container_names_in_network(self.existing_network)
if (self.parameters.ipam_options['subnet'] or self.parameters.ipam_options['iprange'] or
self.parameters.ipam_options['gateway'] or self.parameters.ipam_options['aux_addresses']):
self.parameters.ipam_config = [self.parameters.ipam_options]
if self.parameters.ipam_config:
try:
for ipam_config in self.parameters.ipam_config:
validate_cidr(ipam_config['subnet'])
except ValueError as e:
self.client.fail(str(e))
if self.parameters.driver_options:
|
state = self.parameters.state
if state == 'present':
self.present()
elif state == 'absent':
self.absent()
if self.diff or self.check_mode or self.parameters.debug:
if self.diff:
self.diff_result['before'], self.diff_result['after'] = self.diff_tracker.get_before_after()
self.results['diff'] = self.diff_result
def get_existing_network(self):
return self.client.get_network(name=self.parameters.name)
def has_different_config(self, net):
'''
Evaluates an existing network and returns a tuple containing a boolean
indicating if the configuration is different and a list of differences.
:param net: the inspection output for an existing network
:return: (bool, list)
'''
differences = DifferenceTracker()
if self.parameters.driver and self.parameters.driver != net['Driver']:
differences.add('driver',
parameter=self.parameters.driver,
active=net['Driver'])
if self.parameters.driver_options:
if not net.get('Options'):
differences.add('driver_options',
parameter=self.parameters.driver_options,
active=net.get('Options'))
else:
for key, value in self.parameters.driver_options.items():
if not (key in net['Options']) or value != net['Options'][key]:
differences.add('driver_options.%s' % key,
parameter=value,
active=net['Options'].get(key))
if self.parameters.ipam_driver:
if not net.get('IPAM') or net['IPAM']['Driver'] != self.parameters.ipam_driver:
differences.add('ipam_driver',
parameter=self.parameters.ipam_driver,
active=net.get('IPAM'))
if self.parameters.ipam_driver_options is not None:
ipam_driver_options = net['IPAM'].get('Options') or {}
if ipam_driver_options != self.parameters.ipam_driver_options:
differences.add('ipam_driver_options',
parameter=self.parameters.ipam_driver_options,
active=ipam_driver_options)
if self.parameters.ipam_config is not None and self.parameters.ipam_config:
if not net.get('IPAM') or not net['IPAM']['Config']:
differences.add('ipam_config',
parameter=self.parameters.ipam_config,
active=net.get('IPAM', {}).get('Config'))
else:
# Put network's IPAM config into the same format as module's IPAM config
net_ipam_configs = []
for net_ipam_config in net['IPAM']['Config']:
config = dict()
for k, v in net_ipam_config.items():
config[normalize_ipam_config_key(k)] = v
net_ipam_configs.append(config)
# Compare lists of dicts as sets of dicts
for idx, ipam_config in enumerate(self.parameters.ipam_config):
net_config = dict()
for net_ipam_config in net_ipam_configs:
if dicts_are_essentially_equal(ipam_config, net_ipam_config):
net_config = net_ipam_config
break
for key, value in ipam_config.items():
if value is None:
# due to recursive argument_spec, all keys are always present
# (but have default value None if not specified)
continue
if value != net_config.get(key):
differences.add('ipam_config[%s].%s' % (idx, key),
parameter=value,
active=net_config.get(key))
if self.parameters.enable_ipv6 is not None and self.parameters.enable_ipv6 != net.get('EnableIPv6', False):
differences.add('enable_ipv6',
parameter=self.parameters.enable_ipv6,
active=net.get('EnableIPv6', False))
if self.parameters.internal is not None and self.parameters.internal != net.get('Internal', False):
differences.add('internal',
parameter=self.parameters.internal,
active=net.get('Internal'))
if self.parameters.scope is not None and self.parameters.scope != net.get('Scope'):
differences.add('scope',
parameter=self.parameters.scope,
active=net.get('Scope'))
if self.parameters.attachable is not None and self.parameters.attachable != net.get('Attachable', False):
differences.add('attachable',
parameter=self.parameters.attachable,
active=net.get('Attachable'))
if self.parameters.labels:
if not net.get('Labels'):
differences.add('labels',
parameter=self.parameters.labels,
active=net.get('Labels'))
else:
for key, value in self.parameters.labels.items():
if not (key in net['Labels']) or value != net['Labels'][key]:
differences.add('labels.%s' % key,
parameter=value,
active=net['Labels'].get(key))
return not differences.empty, differences
def create_network(self):
if not self.existing_network:
params = dict(
driver=self.parameters.driver,
options=self.parameters.driver_options,
)
ipam_pools = []
if self.parameters.ipam_config:
for ipam_pool in self.parameters.ipam_config:
if LooseVersion(docker_version) >= LooseVersion('2.0.0'):
ipam_pools.append(IPAMPool(**ipam_pool))
else:
ipam_pools.append(utils.create_ipam_pool(**ipam_pool))
if self.parameters.ipam_driver or self.parameters.ipam_driver_options or ipam_pools:
# Only add ipam parameter if a driver was specified or if IPAM parameters
# were specified. Leaving this parameter away can significantly speed up
# creation; on my machine creation with this option needs ~15 seconds,
# and without just a few seconds.
if LooseVersion(docker_version) >= LooseVersion('2.0.0'):
params['ipam'] = IPAMConfig(driver=self.parameters.ipam_driver,
pool_configs=ipam_pools,
options=self.parameters.ipam_driver_options)
else:
params['ipam'] = utils.create_ipam_config(driver=self.parameters.ipam_driver,
pool_configs=ipam_pools)
if self.parameters.enable_ipv6 is not None:
params['enable_ipv6'] = self.parameters.enable_ipv6
if self.parameters.internal is not None:
params['internal'] = self.parameters.internal
if self.parameters.scope is not None:
params['scope'] = self.parameters.scope
if self.parameters.attachable is not None:
params['attachable'] = self.parameters.attachable
if self.parameters.labels:
params['labels'] = self.parameters.labels
if not self.check_mode:
resp = self.client.create_network(self.parameters.name, **params)
self.client.report_warnings(resp, ['Warning'])
self.existing_network = self.client.get_network(network_id=resp['Id'])
self.results['actions'].append("Created network %s with driver %s" % (self.parameters.name, self.parameters.driver))
self.results['changed'] = True
def remove_network(self):
if self.existing_network:
self.disconnect_all_containers()
if not self.check_mode:
self.client.remove_network(self.parameters.name)
self.results['actions'].append("Removed network %s" % (self.parameters.name,))
self.results['changed'] = True
def is_container_connected(self, container_name):
if not self.existing_network:
return False
return container_name in container_names_in_network(self.existing_network)
def connect_containers(self):
for name in self.parameters.connected:
if not self.is_container_connected(name):
if not self.check_mode:
self.client.connect_container_to_network(name, self.parameters.name)
self.results['actions'].append("Connected container %s" % (name,))
self.results['changed'] = True
self.diff_tracker.add('connected.{0}'.format(name),
parameter=True,
active=False)
def disconnect_missing(self):
if not self.existing_network:
return
containers = self.existing_network['Containers']
if not containers:
return
for c in containers.values():
name = c['Name']
if name not in self.parameters.connected:
self.disconnect_container(name)
def disconnect_all_containers(self):
containers = self.client.get_network(name=self.parameters.name)['Containers']
if not containers:
return
for cont in containers.values():
self.disconnect_container(cont['Name'])
def disconnect_container(self, container_name):
if not self.check_mode:
self.client.disconnect_container_from_network(container_name, self.parameters.name)
self.results['actions'].append("Disconnected container %s" % (container_name,))
self.results['changed'] = True
self.diff_tracker.add('connected.{0}'.format(container_name),
parameter=False,
active=True)
def present(self):
different = False
differences = DifferenceTracker()
if self.existing_network:
different, differences = self.has_different_config(self.existing_network)
self.diff_tracker.add('exists', parameter=True, active=self.existing_network is not None)
if self.parameters.force or different:
self.remove_network()
self.existing_network = None
self.create_network()
self.connect_containers()
if not self.parameters.appends:
self.disconnect_missing()
if self.diff or self.check_mode or self.parameters.debug:
self.diff_result['differences'] = differences.get_legacy_docker_diffs()
self.diff_tracker.merge(differences)
if not self.check_mode and not self.parameters.debug:
self.results.pop('actions')
network_facts = self.get_existing_network()
self.results['ansible_facts'] = {u'docker_network': network_facts}
self.results['network'] = network_facts
def absent(self):
self.diff_tracker.add('exists', parameter=False, active=self.existing_network is not None)
self.remove_network()
def main():
argument_spec = dict(
name=dict(type='str', required=True, aliases=['network_name']),
connected=dict(type='list', default=[], elements='str', aliases=['containers']),
state=dict(type='str', default='present', choices=['present', 'absent']),
driver=dict(type='str', default='bridge'),
driver_options=dict(type='dict', default={}),
force=dict(type='bool', default=False),
appends=dict(type='bool', default=False, aliases=['incremental']),
ipam_driver=dict(type='str'),
ipam_driver_options=dict(type='dict'),
ipam_options=dict(type='dict', default={}, options=dict(
subnet=dict(type='str'),
iprange=dict(type='str'),
gateway=dict(type='str'),
aux_addresses=dict(type='dict'),
), removed_in_version='2.0.0', removed_from_collection='community.general'), # was Ansible 2.12
ipam_config=dict(type='list', elements='dict', options=dict(
subnet=dict(type='str'),
iprange=dict(type='str'),
gateway=dict(type='str'),
aux_addresses=dict(type='dict'),
)),
enable_ipv6=dict(type='bool'),
internal=dict(type='bool'),
labels=dict(type='dict', default={}),
debug=dict(type='bool', default=False),
scope=dict(type='str', choices=['local', 'global', 'swarm']),
attachable=dict(type='bool'),
)
mutually_exclusive = [
('ipam_config', 'ipam_options')
]
option_minimal_versions = dict(
scope=dict(docker_py_version='2.6.0', docker_api_version='1.30'),
attachable=dict(docker_py_version='2.0.0', docker_api_version='1.26'),
labels=dict(docker_api_version='1.23'),
ipam_driver_options=dict(docker_py_version='2.0.0'),
)
client = AnsibleDockerClient(
argument_spec=argument_spec,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True,
min_docker_version='1.10.0',
min_docker_api_version='1.22',
# "The docker server >= 1.10.0"
option_minimal_versions=option_minimal_versions,
)
try:
cm = DockerNetworkManager(client)
client.module.exit_json(**cm.results)
except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(e), exception=traceback.format_exc())
except RequestException as e:
client.fail('An unexpected requests error occurred when docker-py tried to talk to the docker daemon: {0}'.format(e), exception=traceback.format_exc())
if __name__ == '__main__':
main()
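# Example (illustrative only, not part of the module): a playbook task that
# exercises the IPAM options handled above; all names and values are
# hypothetical.
#
#   - name: Create a bridge network with a custom IPAM pool
#     docker_network:
#       name: app_net
#       driver: bridge
#       ipam_config:
#         - subnet: 172.23.27.0/24
#           gateway: 172.23.27.1
#       labels:
#         tier: backend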
forms.py | from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
from wtforms.validators import DataRequired, Length, ValidationError
class SearchForm(FlaskForm):
    search_query = StringField(
        'What cuisine are you in the mood for today?',
        validators=[DataRequired(),
                    Length(3, 20, "Must be longer than 3 characters and under 20")],
        render_kw={'placeholder': "Enter a cuisine (e.g. Nepali, Thai, etc)"})
submit = SubmitField('Search')
class SurpriseForm(FlaskForm):
    surprise_me_button = SubmitField("Surprise Me")
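# Example (illustrative sketch, not part of forms.py): using these forms from
# a Flask view. The app setup, route, and template name are hypothetical.
#
#   from flask import Flask, render_template
#
#   app = Flask(__name__)
#   app.config['SECRET_KEY'] = 'dev'  # Flask-WTF needs a key for CSRF tokens
#
#   @app.route('/', methods=['GET', 'POST'])
#   def index():
#       search = SearchForm()
#       surprise = SurpriseForm()
#       if search.validate_on_submit():
#           query = search.search_query.data  # validated cuisine string
#       return render_template('index.html', search=search, surprise=surprise)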
player.js | /*! @license
* Shaka Player
* Copyright 2016 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
goog.provide('shaka.Player');
goog.require('goog.asserts');
goog.require('shaka.Deprecate');
goog.require('shaka.log');
goog.require('shaka.media.AdaptationSetCriteria');
goog.require('shaka.media.BufferingObserver');
goog.require('shaka.media.DrmEngine');
goog.require('shaka.media.ExampleBasedCriteria');
goog.require('shaka.media.ManifestParser');
goog.require('shaka.media.MediaSourceEngine');
goog.require('shaka.media.MediaSourcePlayhead');
goog.require('shaka.media.MetaSegmentIndex');
goog.require('shaka.media.ClosedCaptionParser');
goog.require('shaka.media.PlayRateController');
goog.require('shaka.media.Playhead');
goog.require('shaka.media.PlayheadObserverManager');
goog.require('shaka.media.PreferenceBasedCriteria');
goog.require('shaka.media.RegionObserver');
goog.require('shaka.media.RegionTimeline');
goog.require('shaka.media.SegmentIndex');
goog.require('shaka.media.SrcEqualsPlayhead');
goog.require('shaka.media.StreamingEngine');
goog.require('shaka.media.TimeRangesUtils');
goog.require('shaka.net.NetworkingEngine');
goog.require('shaka.routing.Walker');
goog.require('shaka.text.SimpleTextDisplayer');
goog.require('shaka.text.TextEngine');
goog.require('shaka.text.UITextDisplayer');
goog.require('shaka.text.WebVttGenerator');
goog.require('shaka.util.AbortableOperation');
goog.require('shaka.util.BufferUtils');
goog.require('shaka.util.ConfigUtils');
goog.require('shaka.util.Error');
goog.require('shaka.util.EventManager');
goog.require('shaka.util.FakeEvent');
goog.require('shaka.util.FakeEventTarget');
goog.require('shaka.util.Functional');
goog.require('shaka.util.IDestroyable');
goog.require('shaka.util.LanguageUtils');
goog.require('shaka.util.ManifestParserUtils');
goog.require('shaka.util.MediaReadyState');
goog.require('shaka.util.MimeUtils');
goog.require('shaka.util.ObjectUtils');
goog.require('shaka.util.Platform');
goog.require('shaka.util.PlayerConfiguration');
goog.require('shaka.util.PublicPromise');
goog.require('shaka.util.Stats');
goog.require('shaka.util.StreamUtils');
goog.require('shaka.util.Timer');
goog.requireType('shaka.media.IClosedCaptionParser');
goog.requireType('shaka.media.PresentationTimeline');
goog.requireType('shaka.routing.Node');
goog.requireType('shaka.routing.Payload');
/**
* @event shaka.Player.ErrorEvent
* @description Fired when a playback error occurs.
* @property {string} type
* 'error'
* @property {!shaka.util.Error} detail
* An object which contains details on the error. The error's
* <code>category</code> and <code>code</code> properties will identify the
* specific error that occurred. In an uncompiled build, you can also use the
* <code>message</code> and <code>stack</code> properties to debug.
* @exportDoc
*/
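// Example (sketch, not part of the Player source): subscribing to this event,
// assuming |player| is an existing shaka.Player instance.
//
//   player.addEventListener('error', (event) => {
//     // event.detail is a shaka.util.Error; category and code identify it.
//     console.error('Shaka error:', event.detail.category, event.detail.code);
//   });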
/**
* @event shaka.Player.StateChangeEvent
* @description Fired when the player changes load states.
* @property {string} type
* 'onstatechange'
* @property {string} state
* The name of the state that the player just entered.
* @exportDoc
*/
/**
* @event shaka.Player.StateIdleEvent
* @description Fired when the player has stopped changing states and will
* remain idle until a new state change request (e.g. <code>load</code>,
* <code>attach</code>, etc.) is made.
* @property {string} type
* 'onstateidle'
* @property {string} state
* The name of the state that the player stopped in.
* @exportDoc
*/
/**
* @event shaka.Player.EmsgEvent
* @description Fired when a non-typical emsg is found in a segment.
* @property {string} type
* 'emsg'
* @property {shaka.extern.EmsgInfo} detail
* An object which contains the content of the emsg box.
* @exportDoc
*/
/**
* @event shaka.Player.DownloadFailed
* @description Fired when a download has failed, for any reason.
 * @property {string} type
 *   'downloadfailed'
 * @property {!shaka.extern.Request} request
 * @property {?shaka.util.Error} error
 * @property {number} httpResponseCode
 * @property {boolean} aborted
* @exportDoc
*/
/**
* @event shaka.Player.DownloadHeadersReceived
* @description Fired when the networking engine has received the headers for
* a download, but before the body has been downloaded.
* If the HTTP plugin being used does not track this information, this event
* will default to being fired when the body is received, instead.
* @property {!Object.<string, string>} headers
* @property {!shaka.extern.Request} request
* @property {!shaka.net.NetworkingEngine.RequestType} type
* 'downloadheadersreceived'
* @exportDoc
*/
/**
* @event shaka.Player.DrmSessionUpdateEvent
* @description Fired when the CDM has accepted the license response.
* @property {string} type
* 'drmsessionupdate'
* @exportDoc
*/
/**
* @event shaka.Player.TimelineRegionAddedEvent
* @description Fired when a media timeline region is added.
* @property {string} type
* 'timelineregionadded'
* @property {shaka.extern.TimelineRegionInfo} detail
* An object which contains a description of the region.
* @exportDoc
*/
/**
* @event shaka.Player.TimelineRegionEnterEvent
* @description Fired when the playhead enters a timeline region.
* @property {string} type
* 'timelineregionenter'
* @property {shaka.extern.TimelineRegionInfo} detail
* An object which contains a description of the region.
* @exportDoc
*/
/**
* @event shaka.Player.TimelineRegionExitEvent
* @description Fired when the playhead exits a timeline region.
* @property {string} type
* 'timelineregionexit'
* @property {shaka.extern.TimelineRegionInfo} detail
* An object which contains a description of the region.
* @exportDoc
*/
/**
* @event shaka.Player.BufferingEvent
* @description Fired when the player's buffering state changes.
* @property {string} type
* 'buffering'
* @property {boolean} buffering
* True when the Player enters the buffering state.
* False when the Player leaves the buffering state.
* @exportDoc
*/
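// Example (sketch): driving a loading indicator from this event; |spinner| is
// a hypothetical DOM element owned by the app.
//
//   player.addEventListener('buffering', (event) => {
//     spinner.style.display = event.buffering ? 'block' : 'none';
//   });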
/**
* @event shaka.Player.LoadingEvent
* @description Fired when the player begins loading. The start of loading is
* defined as when the user has communicated intent to load content (i.e.
* <code>Player.load</code> has been called).
* @property {string} type
* 'loading'
* @exportDoc
*/
/**
* @event shaka.Player.LoadedEvent
* @description Fired when the player ends the load.
* @property {string} type
* 'loaded'
* @exportDoc
*/
/**
* @event shaka.Player.UnloadingEvent
* @description Fired when the player unloads or fails to load.
* Used by the Cast receiver to determine idle state.
* @property {string} type
* 'unloading'
* @exportDoc
*/
/**
* @event shaka.Player.TextTrackVisibilityEvent
* @description Fired when text track visibility changes.
* @property {string} type
* 'texttrackvisibility'
* @exportDoc
*/
/**
* @event shaka.Player.TracksChangedEvent
* @description Fired when the list of tracks changes. For example, this will
* happen when new tracks are added/removed or when track restrictions change.
* @property {string} type
* 'trackschanged'
* @exportDoc
*/
/**
* @event shaka.Player.AdaptationEvent
* @description Fired when an automatic adaptation causes the active tracks
* to change. Does not fire when the application calls
* <code>selectVariantTrack()</code>, <code>selectTextTrack()</code>,
* <code>selectAudioLanguage()</code>, or <code>selectTextLanguage()</code>.
* @property {string} type
* 'adaptation'
* @property {shaka.extern.Track} oldTrack
* @property {shaka.extern.Track} newTrack
* @exportDoc
*/
/**
* @event shaka.Player.VariantChangedEvent
* @description Fired when a call from the application caused a variant change.
* Can be triggered by calls to <code>selectVariantTrack()</code> or
* <code>selectAudioLanguage()</code>. Does not fire when an automatic
* adaptation causes a variant change.
* @property {string} type
* 'variantchanged'
* @property {shaka.extern.Track} oldTrack
* @property {shaka.extern.Track} newTrack
* @exportDoc
*/
/**
* @event shaka.Player.TextChangedEvent
* @description Fired when a call from the application caused a text stream
* change. Can be triggered by calls to <code>selectTextTrack()</code> or
* <code>selectTextLanguage()</code>.
* @property {string} type
* 'textchanged'
* @exportDoc
*/
/**
* @event shaka.Player.ExpirationUpdatedEvent
* @description Fired when there is a change in the expiration times of an
* EME session.
* @property {string} type
* 'expirationupdated'
* @exportDoc
*/
/**
* @event shaka.Player.LargeGapEvent
* @description Fired when the playhead enters a large gap. If the
* <code>streaming.jumpLargeGaps</code> configuration is set, the default
* action of this event is to jump the gap; this can be prevented by calling
* <code>preventDefault()</code> on the event object.
* @property {string} type
* 'largegap'
* @property {number} currentTime
* The current time of the playhead.
* @property {number} gapSize
* The size of the gap, in seconds.
* @exportDoc
*/
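// Example (sketch): opting out of the default gap-jumping behavior, which the
// description above notes can be prevented via preventDefault().
//
//   player.addEventListener('largegap', (event) => {
//     console.log('Gap of', event.gapSize, 's at', event.currentTime);
//     event.preventDefault();  // keep the playhead where it is
//   });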
/**
* @event shaka.Player.ManifestParsedEvent
* @description Fired after the manifest has been parsed, but before anything
 * else happens. At this stage of the loading process, the manifest may still
 * contain streams that will later be filtered out.
* @property {string} type
* 'manifestparsed'
* @exportDoc
*/
/**
* @event shaka.Player.MetadataEvent
 * @description Fired when metadata associated with the stream is found,
 * usually ID3 metadata.
* @property {string} type
* 'metadata'
* @property {number} startTime
* The time that describes the beginning of the range of the metadata to
* which the cue applies.
* @property {?number} endTime
* The time that describes the end of the range of the metadata to which
* the cue applies.
* @property {string} metadataType
 *   Type of metadata, e.g. org.id3 or org.mp4ra.
* @property {shaka.extern.ID3Metadata} payload
* The metadata itself
* @exportDoc
*/
/**
* @event shaka.Player.StreamingEvent
* @description Fired after the manifest has been parsed and track information
* is available, but before streams have been chosen and before any segments
* have been fetched. You may use this event to configure the player based on
* information found in the manifest.
* @property {string} type
* 'streaming'
* @exportDoc
*/
/**
* @event shaka.Player.AbrStatusChangedEvent
* @description Fired when the state of abr has been changed.
* (Enabled or disabled).
* @property {string} type
* 'abrstatuschanged'
* @property {boolean} newStatus
* The new status of the application. True for 'is enabled' and
* false otherwise.
* @exportDoc
*/
/**
* @event shaka.Player.RateChangeEvent
* @description Fired when the video's playback rate changes.
 * This allows the PlayRateController to update its internal rate field
 * before the UI updates the playback button with the newest playback rate.
* @property {string} type
* 'ratechange'
* @exportDoc
*/
/**
* @event shaka.Player.SessionDataEvent
 * @description Fired when the manifest parser finds info about session data.
* Specification: https://tools.ietf.org/html/rfc8216#section-4.3.4.4
* @property {string} type
* 'sessiondata'
* @property {string} id
* The id of the session data.
* @property {string} uri
* The uri with the session data info.
* @property {string} language
* The language of the session data.
* @property {string} value
* The value of the session data.
* @exportDoc
*/
/**
* @summary The main player object for Shaka Player.
*
* @implements {shaka.util.IDestroyable}
* @export
*/
shaka.Player = class extends shaka.util.FakeEventTarget {
/**
* @param {HTMLMediaElement=} mediaElement
* When provided, the player will attach to <code>mediaElement</code>,
* similar to calling <code>attach</code>. When not provided, the player
* will remain detached.
* @param {function(shaka.Player)=} dependencyInjector Optional callback
* which is called to inject mocks into the Player. Used for testing.
*/
constructor(mediaElement, dependencyInjector) {
super();
/** @private {shaka.Player.LoadMode} */
this.loadMode_ = shaka.Player.LoadMode.NOT_LOADED;
/** @private {HTMLMediaElement} */
this.video_ = null;
/** @private {HTMLElement} */
this.videoContainer_ = null;
/**
     * Since we may not always have a text displayer created (e.g. before
     * |load| is called), we need to track what text visibility SHOULD be,
     * so that we can apply it when we create the text displayer. When we
     * create our text displayer, we will use this to show (or not show)
     * text as per the user's requests.
*
* @private {boolean}
*/
this.isTextVisible_ = false;
/** @private {shaka.util.EventManager} */
this.eventManager_ = new shaka.util.EventManager();
/** @private {shaka.net.NetworkingEngine} */
this.networkingEngine_ = null;
/** @private {shaka.media.DrmEngine} */
this.drmEngine_ = null;
/** @private {shaka.media.MediaSourceEngine} */
this.mediaSourceEngine_ = null;
/** @private {shaka.media.Playhead} */
this.playhead_ = null;
/**
* The playhead observers are used to monitor the position of the playhead
* and some other source of data (e.g. buffered content), and raise events.
*
* @private {shaka.media.PlayheadObserverManager}
*/
this.playheadObservers_ = null;
/**
* This is our control over the playback rate of the media element. This
* provides the missing functionality that we need to provide trick play,
* for example a negative playback rate.
*
* @private {shaka.media.PlayRateController}
*/
this.playRateController_ = null;
// We use the buffering observer and timer to track when we move from having
// enough buffered content to not enough. They only exist when content has
// been loaded and are not re-used between loads.
/** @private {shaka.util.Timer} */
this.bufferPoller_ = null;
/** @private {shaka.media.BufferingObserver} */
this.bufferObserver_ = null;
/** @private {shaka.media.RegionTimeline} */
this.regionTimeline_ = null;
/** @private {shaka.media.StreamingEngine} */
this.streamingEngine_ = null;
/** @private {shaka.extern.ManifestParser} */
this.parser_ = null;
    /** @private {?shaka.extern.ManifestParser.Factory} */
    this.parserFactory_ = null;
/** @private {?string} */
this.assetUri_ = null;
/** @private {shaka.extern.AbrManager} */
this.abrManager_ = null;
/**
* The factory that was used to create the abrManager_ instance.
* @private {?shaka.extern.AbrManager.Factory}
*/
this.abrManagerFactory_ = null;
/**
* Contains an ID for use with creating streams. The manifest parser should
* start with small IDs, so this starts with a large one.
* @private {number}
*/
this.nextExternalStreamId_ = 1e9;
/** @private {?shaka.extern.PlayerConfiguration} */
this.config_ = this.defaultConfig_();
/**
* The TextDisplayerFactory that was last used to make a text displayer.
* Stored so that we can tell if a new type of text displayer is desired.
* @private {?shaka.extern.TextDisplayer.Factory}
*/
this.lastTextFactory_;
/** @private {{width: number, height: number}} */
this.maxHwRes_ = {width: Infinity, height: Infinity};
/** @private {shaka.util.Stats} */
this.stats_ = null;
/** @private {!shaka.media.AdaptationSetCriteria} */
this.currentAdaptationSetCriteria_ =
new shaka.media.PreferenceBasedCriteria(
this.config_.preferredAudioLanguage,
this.config_.preferredVariantRole,
this.config_.preferredAudioChannelCount);
/** @private {string} */
this.currentTextLanguage_ = this.config_.preferredTextLanguage;
/** @private {string} */
this.currentTextRole_ = this.config_.preferredTextRole;
/** @private {boolean} */
this.currentTextForced_ = this.config_.preferForcedSubs;
/** @private {!Array.<function():(!Promise|undefined)>} */
this.cleanupOnUnload_ = [];
/**
* This playback start position will be used when
* <code>updateStartTime()</code> has been called to provide an updated
* start position during the media loading process.
*
* @private {?number}
*/
this.updatedStartTime_ = null;
if (dependencyInjector) {
dependencyInjector(this);
}
this.networkingEngine_ = this.createNetworkingEngine();
this.networkingEngine_.setForceHTTPS(this.config_.streaming.forceHTTPS);
/** @private {shaka.extern.IAdManager} */
this.adManager_ = null;
if (shaka.Player.adManagerFactory_) {
this.adManager_ =
shaka.util.Functional.callFactory(shaka.Player.adManagerFactory_);
}
// If the browser comes back online after being offline, then try to play
// again.
this.eventManager_.listen(window, 'online', () => {
this.retryStreaming();
});
/** @private {shaka.routing.Node} */
this.detachNode_ = {name: 'detach'};
/** @private {shaka.routing.Node} */
this.attachNode_ = {name: 'attach'};
/** @private {shaka.routing.Node} */
this.unloadNode_ = {name: 'unload'};
/** @private {shaka.routing.Node} */
this.parserNode_ = {name: 'manifest-parser'};
/** @private {shaka.routing.Node} */
this.manifestNode_ = {name: 'manifest'};
/** @private {shaka.routing.Node} */
this.mediaSourceNode_ = {name: 'media-source'};
/** @private {shaka.routing.Node} */
this.drmNode_ = {name: 'drm-engine'};
/** @private {shaka.routing.Node} */
this.loadNode_ = {name: 'load'};
/** @private {shaka.routing.Node} */
this.srcEqualsDrmNode_ = {name: 'src-equals-drm-engine'};
/** @private {shaka.routing.Node} */
this.srcEqualsNode_ = {name: 'src-equals'};
const AbortableOperation = shaka.util.AbortableOperation;
const actions = new Map();
actions.set(this.attachNode_, (has, wants) => {
return AbortableOperation.notAbortable(this.onAttach_(has, wants));
});
actions.set(this.detachNode_, (has, wants) => {
return AbortableOperation.notAbortable(this.onDetach_(has, wants));
});
actions.set(this.unloadNode_, (has, wants) => {
return AbortableOperation.notAbortable(this.onUnload_(has, wants));
});
actions.set(this.mediaSourceNode_, (has, wants) => {
const p = this.onInitializeMediaSourceEngine_(has, wants);
return AbortableOperation.notAbortable(p);
});
actions.set(this.parserNode_, (has, wants) => {
const p = this.onInitializeParser_(has, wants);
return AbortableOperation.notAbortable(p);
});
actions.set(this.manifestNode_, (has, wants) => {
// This action is actually abortable, so unlike the other callbacks, this
// one will return an abortable operation.
return this.onParseManifest_(has, wants);
});
actions.set(this.drmNode_, (has, wants) => {
const p = this.onInitializeDrm_(has, wants);
return AbortableOperation.notAbortable(p);
});
actions.set(this.loadNode_, (has, wants) => {
return AbortableOperation.notAbortable(this.onLoad_(has, wants));
});
actions.set(this.srcEqualsDrmNode_, (has, wants) => {
const p = this.onInitializeSrcEqualsDrm_(has, wants);
return AbortableOperation.notAbortable(p);
});
actions.set(this.srcEqualsNode_, (has, wants) => {
return this.onSrcEquals_(has, wants);
});
/** @private {shaka.routing.Walker.Implementation} */
const walkerImplementation = {
getNext: (at, has, goingTo, wants) => {
return this.getNextStep_(at, has, goingTo, wants);
},
enterNode: (node, has, wants) => {
this.dispatchEvent(this.makeEvent_(
/* name= */ shaka.Player.EventName.OnStateChange,
/* data= */ {'state': node.name}));
const action = actions.get(node);
return action(has, wants);
},
handleError: async (has, error) => {
shaka.log.warning('The walker saw an error:');
if (error instanceof shaka.util.Error) {
shaka.log.warning('Error Code:', error.code);
} else {
shaka.log.warning('Error Message:', error.message);
shaka.log.warning('Error Stack:', error.stack);
}
// Regardless of what state we were in, if there is an error, we unload.
// This ensures that any initialized system will be torn-down and we
// will go back to a safe foundation. We assume that the media element
// is always safe to use after an error.
await this.onUnload_(has, shaka.Player.createEmptyPayload_());
// There are only two nodes that come before we start loading content,
// attach and detach. If we have a media element, it means we were
// attached to the element, and we can safely return to the attach state
// (we assume that the video element is always re-usable). We favor
// returning to the attach node since it means that the app won't need
// to re-attach if it saw an error.
return has.mediaElement ? this.attachNode_ : this.detachNode_;
},
onIdle: (node) => {
this.dispatchEvent(this.makeEvent_(
/* name= */ shaka.Player.EventName.OnStateIdle,
/* data= */ {'state': node.name}));
},
};
/** @private {shaka.routing.Walker} */
this.walker_ = new shaka.routing.Walker(
this.detachNode_,
shaka.Player.createEmptyPayload_(),
walkerImplementation);
// Even though |attach| will start in later interpreter cycles, it should be
// the LAST thing we do in the constructor because conceptually it relies on
// player having been initialized.
if (mediaElement) {
this.attach(mediaElement, /* initializeMediaSource= */ true);
}
}
/**
* @param {!shaka.Player.EventName} name
* @param {Object=} data
* @return {!shaka.util.FakeEvent}
* @private
*/
makeEvent_(name, data) {
return new shaka.util.FakeEvent(name, data);
}
/**
* After destruction, a Player object cannot be used again.
*
* @override
* @export
*/
async destroy() {
// Make sure we only execute the destroy logic once.
if (this.loadMode_ == shaka.Player.LoadMode.DESTROYED) {
return;
}
// Mark as "dead". This should stop external-facing calls from changing our
// internal state any more. This will stop calls to |attach|, |detach|, etc.
// from interrupting our final move to the detached state.
this.loadMode_ = shaka.Player.LoadMode.DESTROYED;
// Because we have set |loadMode_| to |DESTROYED| we can't call |detach|. We
// must talk to |this.walker_| directly.
const events = this.walker_.startNewRoute((currentPayload) => {
return {
node: this.detachNode_,
payload: shaka.Player.createEmptyPayload_(),
interruptible: false,
};
});
// Wait until the detach has finished so that we don't interrupt it by
// calling |destroy| on |this.walker_|. To avoid failing here, we always
// resolve the promise.
await new Promise((resolve) => {
events.onStart = () => {
shaka.log.info('Preparing to destroy walker...');
};
events.onEnd = () => {
resolve();
};
events.onCancel = () => {
goog.asserts.assert(false,
'Our final detach call should never be cancelled.');
resolve();
};
events.onError = () => {
goog.asserts.assert(false,
'Our final detach call should never see an error');
resolve();
};
events.onSkip = () => {
goog.asserts.assert(false,
'Our final detach call should never be skipped');
resolve();
};
});
await this.walker_.destroy();
// Tear-down the event manager to ensure messages stop moving around.
if (this.eventManager_) {
this.eventManager_.release();
this.eventManager_ = null;
}
this.abrManagerFactory_ = null;
this.abrManager_ = null;
this.config_ = null;
this.stats_ = null;
this.videoContainer_ = null;
if (this.networkingEngine_) {
await this.networkingEngine_.destroy();
this.networkingEngine_ = null;
}
}
/**
* Registers a plugin callback that will be called with
* <code>support()</code>. The callback will return the value that will be
* stored in the return value from <code>support()</code>.
*
* @param {string} name
* @param {function():*} callback
* @export
*/
static registerSupportPlugin(name, callback) {
shaka.Player.supportPlugins_[name] = callback;
}
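  // Example (sketch): a support plugin whose return value appears under the
  // given key in probeSupport() results; the plugin name is illustrative.
  //
  //   shaka.Player.registerSupportPlugin(
  //       'castReceiver', () => typeof cast !== 'undefined');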
/**
* Set a factory to create an ad manager during player construction time.
   * This method needs to be called before instantiating the Player class.
*
* @param {!shaka.extern.IAdManager.Factory} factory
* @export
*/
static setAdManagerFactory(factory) {
shaka.Player.adManagerFactory_ = factory;
}
/**
* Return whether the browser provides basic support. If this returns false,
* Shaka Player cannot be used at all. In this case, do not construct a
* Player instance and do not use the library.
*
* @return {boolean}
* @export
*/
static isBrowserSupported() {
if (!window.Promise) {
shaka.log.alwaysWarn('A Promise implementation or polyfill is required');
}
if (!window.TextDecoder || !window.TextEncoder) {
shaka.log.alwaysWarn(
'A TextDecoder/TextEncoder implementation or polyfill is required');
}
// Basic features needed for the library to be usable.
const basicSupport = !!window.Promise && !!window.Uint8Array &&
!!window.TextDecoder && !!window.TextEncoder &&
// eslint-disable-next-line no-restricted-syntax
!!Array.prototype.forEach;
if (!basicSupport) {
return false;
}
// We do not support IE
if (shaka.util.Platform.isIE()) {
return false;
}
// We do not support iOS 9, 10, or 11, nor those same versions of desktop
// Safari.
const safariVersion = shaka.util.Platform.safariVersion();
if (safariVersion && safariVersion < 12) {
return false;
}
// DRM support is not strictly necessary, but the APIs at least need to be
// there. Our no-op DRM polyfill should handle that.
// TODO(#1017): Consider making even DrmEngine optional.
const drmSupport = shaka.media.DrmEngine.isBrowserSupported();
if (!drmSupport) {
return false;
}
// If we have MediaSource (MSE) support, we should be able to use Shaka.
if (shaka.util.Platform.supportsMediaSource()) {
return true;
}
// If we don't have MSE, we _may_ be able to use Shaka. Look for native HLS
// support, and call this platform usable if we have it.
return shaka.util.Platform.supportsMediaType('application/x-mpegurl');
}
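  // Example (sketch): the usual feature-detection gate before constructing a
  // Player; |initApp| is a hypothetical application entry point.
  //
  //   if (shaka.Player.isBrowserSupported()) {
  //     initApp();
  //   } else {
  //     console.error('This browser is not supported by Shaka Player.');
  //   }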
/**
* Probes the browser to determine what features are supported. This makes a
* number of requests to EME/MSE/etc which may result in user prompts. This
* should only be used for diagnostics.
*
* <p>
* NOTE: This may show a request to the user for permission.
*
* @see https://bit.ly/2ywccmH
* @param {boolean=} promptsOkay
* @return {!Promise.<shaka.extern.SupportType>}
* @export
*/
static async probeSupport(promptsOkay=true) {
goog.asserts.assert(shaka.Player.isBrowserSupported(),
'Must have basic support');
let drm = {};
if (promptsOkay) {
drm = await shaka.media.DrmEngine.probeSupport();
}
const manifest = shaka.media.ManifestParser.probeSupport();
const media = shaka.media.MediaSourceEngine.probeSupport();
const ret = {
manifest: manifest,
media: media,
drm: drm,
};
const plugins = shaka.Player.supportPlugins_;
for (const name in plugins) {
ret[name] = plugins[name]();
}
return ret;
}
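  // Example (sketch, inside an async function): logging diagnostic support
  // info; the keys mirror the returned shaka.extern.SupportType plus any
  // registered support plugins.
  //
  //   const support = await shaka.Player.probeSupport();
  //   console.log('Manifest support:', support.manifest);
  //   console.log('Media support:', support.media);
  //   console.log('DRM support:', support.drm);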
/**
* Tell the player to use <code>mediaElement</code> for all <code>load</code>
* requests until <code>detach</code> or <code>destroy</code> are called.
*
* <p>
* Calling <code>attach</code> with <code>initializedMediaSource=true</code>
* will tell the player to take the initial load step and initialize media
* source.
*
* <p>
* Calls to <code>attach</code> will interrupt any in-progress calls to
* <code>load</code> but cannot interrupt calls to <code>attach</code>,
* <code>detach</code>, or <code>unload</code>.
*
* @param {!HTMLMediaElement} mediaElement
* @param {boolean=} initializeMediaSource
* @return {!Promise}
* @export
*/
attach(mediaElement, initializeMediaSource = true) {
// Do not allow the player to be used after |destroy| is called.
if (this.loadMode_ == shaka.Player.LoadMode.DESTROYED) {
return Promise.reject(this.createAbortLoadError_());
}
const payload = shaka.Player.createEmptyPayload_();
payload.mediaElement = mediaElement;
// If the platform does not support media source, we will never want to
// initialize media source.
if (!shaka.util.Platform.supportsMediaSource()) {
initializeMediaSource = false;
}
const destination = initializeMediaSource ?
this.mediaSourceNode_ :
this.attachNode_;
// Do not allow this route to be interrupted because calls after this attach
// call will depend on the media element being attached.
const events = this.walker_.startNewRoute((currentPayload) => {
return {
node: destination,
payload: payload,
interruptible: false,
};
});
    // Listen to the events that can occur with our request.
events.onStart = () => shaka.log.info('Starting attach...');
return this.wrapWalkerListenersWithPromise_(events);
}
/**
* Tell the player to stop using its current media element. If the player is:
* <ul>
* <li>detached, this will do nothing,
* <li>attached, this will release the media element,
* <li>loading, this will abort loading, unload, and release the media
* element,
* <li>playing content, this will stop playback, unload, and release the
* media element.
* </ul>
*
* <p>
* Calls to <code>detach</code> will interrupt any in-progress calls to
* <code>load</code> but cannot interrupt calls to <code>attach</code>,
* <code>detach</code>, or <code>unload</code>.
*
* @return {!Promise}
* @export
*/
detach() {
// Do not allow the player to be used after |destroy| is called.
if (this.loadMode_ == shaka.Player.LoadMode.DESTROYED) {
return Promise.reject(this.createAbortLoadError_());
}
// Tell the walker to go "detached", but do not allow it to be interrupted.
// If it could be interrupted it means that our media element could fall out
// of sync.
const events = this.walker_.startNewRoute((currentPayload) => {
return {
node: this.detachNode_,
payload: shaka.Player.createEmptyPayload_(),
interruptible: false,
};
});
events.onStart = () => shaka.log.info('Starting detach...');
return this.wrapWalkerListenersWithPromise_(events);
}
/**
* Tell the player to either return to:
* <ul>
* <li>detached (when it does not have a media element),
* <li>attached (when it has a media element and
* <code>initializedMediaSource=false</code>)
* <li>media source initialized (when it has a media element and
* <code>initializedMediaSource=true</code>)
* </ul>
*
* <p>
* Calls to <code>unload</code> will interrupt any in-progress calls to
* <code>load</code> but cannot interrupt calls to <code>attach</code>,
* <code>detach</code>, or <code>unload</code>.
*
* @param {boolean=} initializeMediaSource
* @return {!Promise}
* @export
*/
unload(initializeMediaSource = true) {
// Do not allow the player to be used after |destroy| is called.
if (this.loadMode_ == shaka.Player.LoadMode.DESTROYED) {
return Promise.reject(this.createAbortLoadError_());
}
// If the platform does not support media source, we will never want to
// initialize media source.
if (!shaka.util.Platform.supportsMediaSource()) {
initializeMediaSource = false;
}
// Since we are going either to attached or detached (through unloaded), we
// can't allow it to be interrupted or else we could lose track of what
    // media element we are supposed to use.
//
// Using the current payload, we can determine which node we want to go to.
// If we have a media element, we want to go back to attached. If we have no
// media element, we want to go back to detached.
const payload = shaka.Player.createEmptyPayload_();
const events = this.walker_.startNewRoute((currentPayload) => {
// When someone calls |unload| we can either be before attached or
// detached (there is nothing stopping someone from calling |detach| when
// we are already detached).
//
// If we are attached to the correct element, we can tear down the
// previous playback components and go to the attached media source node
// depending on whether or not the caller wants to pre-init media source.
//
// If we don't have a media element, we assume that we are already at the
// detached node - but only the walker knows that. To ensure we are
// actually there, we tell the walker to go to detach. While this is
// technically unnecessary, it ensures that we are in the state we want
// to be in and ready for the next request.
let destination = null;
if (currentPayload.mediaElement && initializeMediaSource) {
destination = this.mediaSourceNode_;
} else if (currentPayload.mediaElement) {
destination = this.attachNode_;
} else {
destination = this.detachNode_;
}
goog.asserts.assert(destination, 'We should have picked a destination.');
// Copy over the media element because we want to keep using the same
// element - the other values don't matter.
payload.mediaElement = currentPayload.mediaElement;
return {
node: destination,
payload: payload,
interruptible: false,
};
});
events.onStart = () => shaka.log.info('Starting unload...');
return this.wrapWalkerListenersWithPromise_(events);
}
/**
* Provides a way to update the stream start position during the media loading
   * process. It can, for example, be called from the <code>manifestparsed</code>
* event handler to update the start position based on information in the
* manifest.
*
* @param {number} startTime
* @export
*/
updateStartTime(startTime) {
this.updatedStartTime_ = startTime;
}
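  // Example (sketch): adjusting the start position once the manifest is
  // available, as suggested above; |pickStartTime| is a hypothetical helper
  // that derives a time from app state or the parsed manifest.
  //
  //   player.addEventListener('manifestparsed', () => {
  //     player.updateStartTime(pickStartTime());
  //   });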
/**
* Tell the player to load the content at <code>assetUri</code> and start
* playback at <code>startTime</code>. Before calling <code>load</code>,
* a call to <code>attach</code> must have succeeded.
*
* <p>
* Calls to <code>load</code> will interrupt any in-progress calls to
* <code>load</code> but cannot interrupt calls to <code>attach</code>,
* <code>detach</code>, or <code>unload</code>.
*
* @param {string} assetUri
* @param {?number=} startTime
* When <code>startTime</code> is <code>null</code> or
* <code>undefined</code>, playback will start at the default start time (0
* for VOD and liveEdge for LIVE).
* @param {string=} mimeType
* @return {!Promise}
* @export
*/
load(assetUri, startTime, mimeType) {
this.updatedStartTime_ = null;
// Do not allow the player to be used after |destroy| is called.
if (this.loadMode_ == shaka.Player.LoadMode.DESTROYED) {
return Promise.reject(this.createAbortLoadError_());
}
// We dispatch the loading event when someone calls |load| because we want
// to surface the user intent.
this.dispatchEvent(this.makeEvent_(shaka.Player.EventName.Loading));
// Right away we know what the asset uri and start-of-load time are. We will
// fill-in the rest of the information later.
const payload = shaka.Player.createEmptyPayload_();
payload.uri = assetUri;
payload.startTimeOfLoad = Date.now() / 1000;
if (mimeType) {
payload.mimeType = mimeType;
}
// Because we allow |startTime| to be optional, it means that it will be
// |undefined| when not provided. This means that we need to re-map
// |undefined| to |null| while preserving |0| as a meaningful value.
if (startTime !== undefined) {
payload.startTime = startTime;
}
// TODO: Refactor to determine whether it's a manifest or not, and whether
// or not we can play it. Then we could return a better error than
// UNABLE_TO_GUESS_MANIFEST_TYPE for WebM in Safari.
const useSrcEquals = this.shouldUseSrcEquals_(payload);
const destination = useSrcEquals ? this.srcEqualsNode_ : this.loadNode_;
// Allow this request to be interrupted, this will allow other requests to
// cancel a load and quickly start a new load.
const events = this.walker_.startNewRoute((currentPayload) => {
if (currentPayload.mediaElement == null) {
// Because we return null, this "new route" will not be used.
return null;
}
// Keep using whatever media element we have right now.
payload.mediaElement = currentPayload.mediaElement;
return {
node: destination,
payload: payload,
interruptible: true,
};
});
// Stats are for a single playback/load session. Stats must be initialized
// before we allow calls to |updateStateHistory|.
this.stats_ = new shaka.util.Stats();
// Load's request is a little different, so we can't use our normal
    // listeners-to-promise method. It is the only request that may be skipped,
    // so we need to set the onSkip callback to reject with a specific error.
events.onStart =
() => shaka.log.info('Starting load of ' + assetUri + '...');
return new Promise((resolve, reject) => {
events.onSkip = () => reject(new shaka.util.Error(
shaka.util.Error.Severity.CRITICAL,
shaka.util.Error.Category.PLAYER,
shaka.util.Error.Code.NO_VIDEO_ELEMENT));
events.onEnd = () => {
resolve();
// We dispatch the loaded event when the load promise is resolved
this.dispatchEvent(this.makeEvent_(shaka.Player.EventName.Loaded));
};
events.onCancel = () => reject(this.createAbortLoadError_());
events.onError = (e) => reject(e);
});
}
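  // Example (sketch, inside an async function): a typical construct-and-load
  // flow; the media element and asset URI are illustrative.
  //
  //   const player = new shaka.Player(videoElement);
  //   try {
  //     await player.load('https://example.com/asset.mpd');
  //   } catch (error) {
  //     // error is a shaka.util.Error when the load fails.
  //     console.error('Load failed:', error.code);
  //   }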
/**
* Check if src= should be used to load the asset at |uri|. Assume that media
* source is the default option, and that src= is for special cases.
*
* @param {shaka.routing.Payload} payload
* @return {boolean}
* |true| if the content should be loaded with src=, |false| if the content
* should be loaded with MediaSource.
* @private
*/
shouldUseSrcEquals_(payload) {
const Platform = shaka.util.Platform;
// If we are using a platform that does not support media source, we will
// fall back to src= to handle all playback.
if (!Platform.supportsMediaSource()) {
return true;
}
// The most accurate way to tell the player how to load the content is via
// MIME type. We can fall back to features of the URI if needed.
let mimeType = payload.mimeType;
const uri = payload.uri || '';
// If we don't have a MIME type, try to guess based on the file extension.
// TODO: Too generic to belong to ManifestParser now. Refactor.
if (!mimeType) {
// Try using the uri extension.
const extension = shaka.media.ManifestParser.getExtension(uri);
mimeType = shaka.Player.SRC_EQUAL_EXTENSIONS_TO_MIME_TYPES_[extension];
}
// TODO: The load graph system has a design limitation that requires routing
// destination to be chosen synchronously. This means we can only make the
// right choice about src= consistently if we have a well-known file
// extension or API-provided MIME type. Detection of MIME type from a HEAD
// request (as is done for manifest types) can't be done yet.
if (mimeType) {
// If we have a MIME type, check if the browser can play it natively.
// This will cover both single files and native HLS.
const mediaElement = payload.mediaElement || Platform.anyMediaElement();
const canPlayNatively = mediaElement.canPlayType(mimeType) != '';
// If we can't play natively, then src= isn't an option.
if (!canPlayNatively) {
return false;
}
const canPlayMediaSource =
shaka.media.ManifestParser.isSupported(uri, mimeType);
// If MediaSource isn't an option, the native option is our only chance.
if (!canPlayMediaSource) {
return true;
}
// If we land here, both are feasible.
goog.asserts.assert(canPlayNatively && canPlayMediaSource,
'Both native and MSE playback should be possible!');
// We would prefer MediaSource in some cases, and src= in others. For
// example, Android has native HLS, but we'd prefer our own MediaSource
// version there.
// Native HLS can be preferred on any platform via this flag:
if (this.config_.streaming.preferNativeHls) {
return true;
}
// For Safari, we have an older flag which only applies to this one
// browser:
if (Platform.isApple()) {
return this.config_.streaming.useNativeHlsOnSafari;
}
// In all other cases, we prefer MediaSource.
return false;
}
// Unless there are good reasons to use src= (single-file playback or native
// HLS), we prefer MediaSource. So the final return value for choosing src=
// is false.
return false;
}
/**
* This should only be called by the load graph when it is time to attach to
* a media element. The only times this may be called are when we are being
* asked to re-attach to the current media element, or attach to a new media
* element while not attached to a media element.
*
* This method assumes that it is safe for it to execute, the load-graph is
* responsible for ensuring all assumptions are true.
*
* Attaching to a media element is defined as:
* - Registering error listeners to the media element.
* - Caching the video element for use outside of the load graph.
*
* @param {shaka.routing.Payload} has
* @param {shaka.routing.Payload} wants
* @return {!Promise}
* @private
*/
onAttach_(has, wants) {
// If we don't have a media element yet, it means we are entering
// "attach" from another node.
//
// If we have a media element, it should match |wants.mediaElement|
// because it means we are going from "attach" to "attach".
//
// These constraints should be maintained and guaranteed by the routing
// logic in |getNextStep_|.
goog.asserts.assert(
has.mediaElement == null || has.mediaElement == wants.mediaElement,
'The routing logic failed. MediaElement requirement failed.');
if (has.mediaElement == null) {
has.mediaElement = wants.mediaElement;
const onError = (error) => this.onVideoError_(error);
this.eventManager_.listen(has.mediaElement, 'error', onError);
}
this.video_ = has.mediaElement;
return Promise.resolve();
}
/**
* This should only be called by the load graph when it is time to detach from
* a media element. The only times this may be called are when we are being
* asked to detach from the current media element, or detach when we are
* already detached.
*
* This method assumes that it is safe for it to execute, the load-graph is
* responsible for ensuring all assumptions are true.
*
* Detaching from a media element is defined as:
* - Removing error listeners from the media element.
* - Dropping the cached reference to the video element.
*
* @param {shaka.routing.Payload} has
* @param {shaka.routing.Payload} wants
* @return {!Promise}
* @private
*/
onDetach_(has, wants) {
// If we are going from "detached" to "detached" we wouldn't have
// a media element to detach from.
if (has.mediaElement) {
this.eventManager_.unlisten(has.mediaElement, 'error');
has.mediaElement = null;
}
// Clear our cached copy of the media element.
this.video_ = null;
return Promise.resolve();
}
/**
* This should only be called by the load graph when it is time to unload all
* currently initialized playback components. Unlike the other load actions,
* this action is built to be more general. We need to do this because we
* don't know what state the player will be in before unloading (including
* after an error occurred in the middle of a transition).
*
* This method assumes that any component could be |null| and should be safe
* to call from any point in the load graph.
*
* @param {shaka.routing.Payload} has
* @param {shaka.routing.Payload} wants
* @return {!Promise}
* @private
*/
async onUnload_(has, wants) {
// Set the load mode to unload right away so that all the public methods
// will stop using the internal components. We need to make sure that we
// are not overriding the destroyed state because we will unload when we are
// destroying the player.
if (this.loadMode_ != shaka.Player.LoadMode.DESTROYED) {
this.loadMode_ = shaka.Player.LoadMode.NOT_LOADED;
}
// Run any general cleanup tasks now. This should be here at the top, right
// after setting loadMode_, so that internal components still exist as they
// did when the cleanup tasks were registered in the array.
const cleanupTasks = this.cleanupOnUnload_.map((cb) => cb());
this.cleanupOnUnload_ = [];
await Promise.all(cleanupTasks);
// Dispatch the unloading event.
this.dispatchEvent(this.makeEvent_(shaka.Player.EventName.Unloading));
// Remove everything that has to do with loading content from our payload
// since we are releasing everything that depended on it.
has.mimeType = null;
has.startTime = null;
has.uri = null;
// In most cases we should have a media element. The one exception would
// be if there was an error and we, by chance, did not have a media element.
if (has.mediaElement) {
this.eventManager_.unlisten(has.mediaElement, 'loadedmetadata');
this.eventManager_.unlisten(has.mediaElement, 'playing');
this.eventManager_.unlisten(has.mediaElement, 'pause');
this.eventManager_.unlisten(has.mediaElement, 'ended');
this.eventManager_.unlisten(has.mediaElement, 'ratechange');
}
// Some observers use some playback components, shutting down the observers
// first ensures that they don't try to use the playback components
// mid-destroy.
if (this.playheadObservers_) {
this.playheadObservers_.release();
this.playheadObservers_ = null;
}
if (this.bufferPoller_) {
this.bufferPoller_.stop();
this.bufferPoller_ = null;
}
    // Stop the parser early. Since it is at the start of the pipeline, it
    // should be stopped early to keep it from pushing new data downstream.
if (this.parser_) {
await this.parser_.stop();
this.parser_ = null;
this.parserFactory_ = null;
}
// Abr Manager will tell streaming engine what to do, so we need to stop
// it before we destroy streaming engine. Unlike with the other components,
// we do not release the instance, we will reuse it in later loads.
if (this.abrManager_) {
await this.abrManager_.stop();
}
// Streaming engine will push new data to media source engine, so we need
// to shut it down before destroy media source engine.
if (this.streamingEngine_) {
await this.streamingEngine_.destroy();
this.streamingEngine_ = null;
}
if (this.playRateController_) {
this.playRateController_.release();
this.playRateController_ = null;
}
// Playhead is used by StreamingEngine, so we can't destroy this until after
// StreamingEngine has stopped.
if (this.playhead_) {
this.playhead_.release();
this.playhead_ = null;
}
// Media source engine holds onto the media element, and in order to detach
// the media keys (with drm engine), we need to break the connection between
// media source engine and the media element.
if (this.mediaSourceEngine_) {
await this.mediaSourceEngine_.destroy();
this.mediaSourceEngine_ = null;
}
if (this.adManager_) {
this.adManager_.onAssetUnload();
}
// In order to unload a media element, we need to remove the src attribute
// and then load again. When we destroy media source engine, this will be
// done for us, but for src=, we need to do it here.
//
// DrmEngine requires this to be done before we destroy DrmEngine itself.
if (has.mediaElement && has.mediaElement.src) {
// TODO: Investigate this more. Only reproduces on Firefox 69.
// Introduce a delay before detaching the video source. We are seeing
// spurious Promise rejections involving an AbortError in our tests
// otherwise.
await new Promise(
(resolve) => new shaka.util.Timer(resolve).tickAfter(0.1));
has.mediaElement.removeAttribute('src');
has.mediaElement.load();
// Remove all track nodes
while (has.mediaElement.lastChild) {
has.mediaElement.removeChild(has.mediaElement.firstChild);
}
}
if (this.drmEngine_) {
await this.drmEngine_.destroy();
this.drmEngine_ = null;
}
this.assetUri_ = null;
this.bufferObserver_ = null;
if (this.manifest_) {
for (const variant of this.manifest_.variants) {
for (const stream of [variant.audio, variant.video]) {
if (stream && stream.segmentIndex) {
stream.segmentIndex.release();
}
}
}
for (const stream of this.manifest_.textStreams) {
if (stream.segmentIndex) {
stream.segmentIndex.release();
}
}
}
this.manifest_ = null;
this.stats_ = new shaka.util.Stats(); // Replace with a clean stats object.
this.lastTextFactory_ = null;
// Make sure that the app knows of the new buffering state.
this.updateBufferState_();
}
/**
* This should only be called by the load graph when it is time to initialize
* media source engine. The only time this may be called is when we are
* attached to the same media element as in the request.
*
* This method assumes that it is safe for it to execute. The load-graph is
* responsible for ensuring all assumptions are true.
*
* @param {shaka.routing.Payload} has
* @param {shaka.routing.Payload} wants
*
* @return {!Promise}
* @private
*/
async onInitializeMediaSourceEngine_(has, wants) {
goog.asserts.assert(
shaka.util.Platform.supportsMediaSource(),
'We should not be initializing media source on a platform that does ' +
'not support media source.');
goog.asserts.assert(
has.mediaElement,
'We should have a media element when initializing media source.');
goog.asserts.assert(
has.mediaElement == wants.mediaElement,
'|has| and |wants| should have the same media element when ' +
'initializing media source.');
goog.asserts.assert(
this.mediaSourceEngine_ == null,
'We should not have a media source engine yet.');
const closedCaptionsParser = new shaka.media.ClosedCaptionParser();
// When changing text visibility we need to update both the text displayer
// and streaming engine because we don't always stream text. To ensure that
// text displayer and streaming engine are always in sync, wait until they
// are both initialized before setting the initial value.
const textDisplayerFactory = this.config_.textDisplayFactory;
const textDisplayer =
shaka.util.Functional.callFactory(textDisplayerFactory);
this.lastTextFactory_ = textDisplayerFactory;
const mediaSourceEngine = this.createMediaSourceEngine(
has.mediaElement,
closedCaptionsParser,
textDisplayer,
(metadata, offset, endTime) => {
this.processTimedMetadataMediaSrc_(metadata, offset, endTime);
});
// Wait for media source engine to finish opening. This promise should
// NEVER be rejected as per the media source engine implementation.
await mediaSourceEngine.open();
// Wait until it is ready to actually store the reference.
this.mediaSourceEngine_ = mediaSourceEngine;
}
/**
* Create the parser for the asset located at |wants.uri|. This should only be
* called as part of the load graph.
*
* This method assumes that it is safe for it to execute, the load-graph is
* responsible for ensuring all assumptions are true.
*
* @param {shaka.routing.Payload} has
* @param {shaka.routing.Payload} wants
* @return {!Promise}
* @private
*/
async onInitializeParser_(has, wants) {
goog.asserts.assert(
has.mediaElement,
'We should have a media element when initializing the parser.');
goog.asserts.assert(
has.mediaElement == wants.mediaElement,
'|has| and |wants| should have the same media element when ' +
'initializing the parser.');
goog.asserts.assert(
this.networkingEngine_,
'Need networking engine when initializing the parser.');
goog.asserts.assert(
this.config_,
'Need player config when initializing the parser.');
// We are going to "lock-in" the mime type and uri since they are
// what we are going to use to create our parser and parse the manifest.
has.mimeType = wants.mimeType;
has.uri = wants.uri;
goog.asserts.assert(
has.uri,
'We should have an asset uri when initializing the parsing.');
// Store references to things we asserted so that we don't need to reassert
// them again later.
const assetUri = has.uri;
const networkingEngine = this.networkingEngine_;
// Save the uri so that it can be used outside of the load-graph.
this.assetUri_ = assetUri;
// Create the parser that we will use to parse the manifest.
this.parserFactory_ = await shaka.media.ManifestParser.getFactory(
assetUri,
networkingEngine,
this.config_.manifest.retryParameters,
has.mimeType);
goog.asserts.assert(this.parserFactory_, 'Must have manifest parser');
this.parser_ = shaka.util.Functional.callFactory(this.parserFactory_);
const manifestConfig =
shaka.util.ObjectUtils.cloneObject(this.config_.manifest);
// Don't read video segments if the player is attached to an audio element
if (wants.mediaElement && wants.mediaElement.nodeName === 'AUDIO') {
manifestConfig.disableVideo = true;
}
this.parser_.configure(manifestConfig);
}
/**
* Parse the manifest at |has.uri| using the parser that should have already
* been created. This should only be called as part of the load graph.
*
* This method assumes that it is safe for it to execute, the load-graph is
* responsible for ensuring all assumptions are true.
*
* @param {shaka.routing.Payload} has
* @param {shaka.routing.Payload} wants
* @return {!shaka.util.AbortableOperation}
* @private
*/
onParseManifest_(has, wants) {
goog.asserts.assert(
has.mimeType == wants.mimeType,
'|has| and |wants| should have the same mime type when parsing.');
goog.asserts.assert(
has.uri == wants.uri,
'|has| and |wants| should have the same uri when parsing.');
goog.asserts.assert(
has.uri,
'|has| should have a valid uri when parsing.');
goog.asserts.assert(
has.uri == this.assetUri_,
'|has.uri| should match the cached asset uri.');
goog.asserts.assert(
this.networkingEngine_,
'Need networking engine to parse manifest.');
goog.asserts.assert(
this.config_,
'Need player config to parse manifest.');
goog.asserts.assert(
this.parser_,
'|this.parser_| should have been set in an earlier step.');
// Store references to things we asserted so that we don't need to reassert
// them again later.
const assetUri = has.uri;
const networkingEngine = this.networkingEngine_;
// This will be needed by the parser once it starts parsing, so we will
    // initialize it now even though it appears a little out-of-place.
this.regionTimeline_ =
new shaka.media.RegionTimeline(() => this.seekRange());
this.regionTimeline_.setListeners(/* onRegionAdded= */ (region) => {
this.onRegionEvent_(shaka.Player.EventName.TimelineRegionAdded, region);
if (this.adManager_) {
this.adManager_.onDashTimedMetadata(region);
}
});
// TODO (#1391): Once filterManifest_ is async, remove this eslint disable.
/* eslint-disable require-await */
const playerInterface = {
networkingEngine: networkingEngine,
filter: async (manifest) => this.filterManifest_(manifest),
makeTextStreamsForClosedCaptions: (manifest) => {
return this.makeTextStreamsForClosedCaptions_(manifest);
},
// Called when the parser finds a timeline region. This can be called
// before we start playback or during playback (live/in-progress
// manifest).
onTimelineRegionAdded: (region) => this.regionTimeline_.addRegion(region),
onEvent: (event) => this.dispatchEvent(event),
onError: (error) => this.onError_(error),
isLowLatencyMode: () => this.isLowLatencyMode_(),
isAutoLowLatencyMode: () => this.isAutoLowLatencyMode_(),
enableLowLatencyMode: () => {
this.configure('streaming.lowLatencyMode', true);
},
};
/* eslint-enable require-await */
const startTime = Date.now() / 1000;
return new shaka.util.AbortableOperation(/* promise= */ (async () => {
this.manifest_ = await this.parser_.start(assetUri, playerInterface);
// This event is fired after the manifest is parsed, but before any
// filtering takes place.
const event = this.makeEvent_(shaka.Player.EventName.ManifestParsed);
this.dispatchEvent(event);
// We require all manifests to have at least one variant.
if (this.manifest_.variants.length == 0) {
throw new shaka.util.Error(
shaka.util.Error.Severity.CRITICAL,
shaka.util.Error.Category.MANIFEST,
shaka.util.Error.Code.NO_VARIANTS);
}
// Make sure that all variants are either: audio-only, video-only, or
// audio-video.
shaka.Player.filterForAVVariants_(this.manifest_);
const now = Date.now() / 1000;
const delta = now - startTime;
this.stats_.setManifestTime(delta);
})(), /* onAbort= */ () => {
shaka.log.info('Aborting parser step...');
return this.parser_.stop();
});
}
/**
* This should only be called by the load graph when it is time to initialize
   * drmEngine. The only time this may be called is when we are attached to a
   * media element and have parsed a manifest.
*
* The load-graph is responsible for ensuring all assumptions made by this
* method are valid before executing it.
*
* @param {shaka.routing.Payload} has
* @param {shaka.routing.Payload} wants
* @return {!Promise}
* @private
*/
async onInitializeDrm_(has, wants) {
goog.asserts.assert(
has.mimeType == wants.mimeType,
'The load graph should have ensured the mime types matched.');
goog.asserts.assert(
has.uri == wants.uri,
        'The load graph should have ensured the uris matched.');
goog.asserts.assert(
this.networkingEngine_,
'|onInitializeDrm_| should never be called after |destroy|');
goog.asserts.assert(
this.config_,
'|onInitializeDrm_| should never be called after |destroy|');
goog.asserts.assert(
this.manifest_,
'|this.manifest_| should have been set in an earlier step.');
goog.asserts.assert(
has.mediaElement,
'We should have a media element when initializing the DRM Engine.');
const startTime = Date.now() / 1000;
let firstEvent = true;
this.drmEngine_ = this.createDrmEngine({
netEngine: this.networkingEngine_,
onError: (e) => {
this.onError_(e);
},
onKeyStatus: (map) => {
this.onKeyStatus_(map);
},
onExpirationUpdated: (id, expiration) => {
this.onExpirationUpdated_(id, expiration);
},
onEvent: (e) => {
this.dispatchEvent(e);
if (e.type == shaka.Player.EventName.DrmSessionUpdate && firstEvent) {
firstEvent = false;
const now = Date.now() / 1000;
const delta = now - startTime;
this.stats_.setDrmTime(delta);
}
},
});
this.drmEngine_.configure(this.config_.drm);
await this.drmEngine_.initForPlayback(
this.manifest_.variants,
this.manifest_.offlineSessionIds);
await this.drmEngine_.attach(has.mediaElement);
// Now that we have drm information, filter the manifest (again) so that we
// can ensure we only use variants with the selected key system.
await this.filterManifest_(this.manifest_);
}
/**
* This should only be called by the load graph when it is time to load all
   * playback components. The only time this may be called is when we are
   * attached to the same media element as in the request.
*
   * This method assumes that it is safe for it to execute; the load-graph is
   * responsible for ensuring all assumptions are true.
*
* Loading is defined as:
* - Attaching all playback-related listeners to the media element
* - Initializing playback and observers
* - Initializing ABR Manager
* - Initializing Streaming Engine
* - Starting playback at |wants.startTime|
*
* @param {shaka.routing.Payload} has
* @param {shaka.routing.Payload} wants
* @private
*/
async onLoad_(has, wants) {
goog.asserts.assert(
has.mimeType == wants.mimeType,
'|has| and |wants| should have the same mime type when loading.');
goog.asserts.assert(
has.uri == wants.uri,
'|has| and |wants| should have the same uri when loading.');
goog.asserts.assert(
has.mediaElement,
'We should have a media element when loading.');
goog.asserts.assert(
!isNaN(wants.startTimeOfLoad),
'|wants| should tell us when the load was originally requested');
// Since we are about to start playback, we will lock in the start time as
// something we are now depending on.
has.startTime = wants.startTime;
    // If updateStartTime() has been called since load() was invoked, use the
    // updated startTime.
if (this.updatedStartTime_ != null) {
has.startTime = this.updatedStartTime_;
this.updatedStartTime_ = null;
}
    // Store a reference to values in |has| after asserting so that the Closure
    // compiler knows that they will still be non-null between calls to await.
const mediaElement = has.mediaElement;
const assetUri = has.uri;
// Save the uri so that it can be used outside of the load-graph.
this.assetUri_ = assetUri;
this.playRateController_ = new shaka.media.PlayRateController({
getRate: () => has.mediaElement.playbackRate,
getDefaultRate: () => has.mediaElement.defaultPlaybackRate,
setRate: (rate) => { has.mediaElement.playbackRate = rate; },
movePlayhead: (delta) => { has.mediaElement.currentTime += delta; },
});
const updateStateHistory = () => this.updateStateHistory_();
const onRateChange = () => this.onRateChange_();
this.eventManager_.listen(mediaElement, 'playing', updateStateHistory);
this.eventManager_.listen(mediaElement, 'pause', updateStateHistory);
this.eventManager_.listen(mediaElement, 'ended', updateStateHistory);
this.eventManager_.listen(mediaElement, 'ratechange', onRateChange);
const abrFactory = this.config_.abrFactory;
if (!this.abrManager_ || this.abrManagerFactory_ != abrFactory) {
this.abrManagerFactory_ = abrFactory;
this.abrManager_ = shaka.util.Functional.callFactory(abrFactory);
if (typeof this.abrManager_.playbackRateChanged != 'function') {
shaka.Deprecate.deprecateFeature(4,
'AbrManager',
'Please use an AbrManager with playbackRateChanged function.');
this.abrManager_.playbackRateChanged = (rate) => {};
}
this.abrManager_.configure(this.config_.abr);
}
// Copy preferred languages from the config again, in case the config was
// changed between construction and playback.
this.currentAdaptationSetCriteria_ =
new shaka.media.PreferenceBasedCriteria(
this.config_.preferredAudioLanguage,
this.config_.preferredVariantRole,
this.config_.preferredAudioChannelCount);
this.currentTextLanguage_ = this.config_.preferredTextLanguage;
this.currentTextRole_ = this.config_.preferredTextRole;
this.currentTextForced_ = this.config_.preferForcedSubs;
shaka.Player.applyPlayRange_(this.manifest_.presentationTimeline,
this.config_.playRangeStart,
this.config_.playRangeEnd);
this.abrManager_.init((variant, clearBuffer, safeMargin) => {
return this.switch_(variant, clearBuffer, safeMargin);
});
this.playhead_ = this.createPlayhead(has.startTime);
this.playheadObservers_ = this.createPlayheadObserversForMSE_();
// We need to start the buffer management code near the end because it will
// set the initial buffering state and that depends on other components
// being initialized.
const rebufferThreshold = Math.max(
this.manifest_.minBufferTime, this.config_.streaming.rebufferingGoal);
this.startBufferManagement_(rebufferThreshold);
// If the content is multi-codec and the browser can play more than one of
// them, choose codecs now before we initialize streaming.
shaka.util.StreamUtils.chooseCodecsAndFilterManifest(
this.manifest_,
this.config_.preferredVideoCodecs,
this.config_.preferredAudioCodecs,
this.config_.preferredAudioChannelCount,
this.config_.preferredDecodingAttributes);
this.streamingEngine_ = this.createStreamingEngine();
this.streamingEngine_.configure(this.config_.streaming);
// Set the load mode to "loaded with media source" as late as possible so
// that public methods won't try to access internal components until
    // they're all initialized. We MUST switch to loaded before dispatching the
    // "streaming" event so that its listeners can access internal information.
this.loadMode_ = shaka.Player.LoadMode.MEDIA_SOURCE;
// The event must be fired after we filter by restrictions but before the
// active stream is picked to allow those listening for the "streaming"
// event to make changes before streaming starts.
this.dispatchEvent(this.makeEvent_(shaka.Player.EventName.Streaming));
    // Pick the initial streams to play.
    // However, we skip switching to an initial variant if the user has
    // already picked a variant track (via the selectVariantTrack API).
let initialVariant = null;
const activeVariantTrack = this.getVariantTracks().find((t) => t.active);
if (!activeVariantTrack) {
initialVariant = this.chooseVariant_();
goog.asserts.assert(initialVariant, 'Must choose an initial variant!');
this.switchVariant_(initialVariant, /* fromAdaptation= */ true,
/* clearBuffer= */ false, /* safeMargin= */ 0);
// Now that we have initial streams, we may adjust the start time to align
// to a segment boundary.
if (this.config_.streaming.startAtSegmentBoundary) {
const startTime = this.playhead_.getTime();
const adjustedTime =
await this.adjustStartTime_(initialVariant, startTime);
this.playhead_.setStartTime(adjustedTime);
}
// Since the first streams just became active, send an adaptation event.
this.onAdaptation_(null,
shaka.util.StreamUtils.variantToTrack(initialVariant));
}
    // Decide if text should be shown automatically.
    // Similar to the variant track above, we skip switching to an initial
    // text track if the user has already picked one (via the selectTextTrack
    // API).
const activeTextTrack = this.getTextTracks().find((t) => t.active);
if (!activeTextTrack) {
const initialTextStream = this.chooseTextStream_();
if (initialTextStream) {
this.addTextStreamToSwitchHistory_(
initialTextStream, /* fromAdaptation= */ true);
}
if (initialVariant) {
this.setInitialTextState_(initialVariant, initialTextStream);
}
// Don't initialize with a text stream unless we should be streaming text.
if (initialTextStream && this.shouldStreamText_()) {
this.streamingEngine_.switchTextStream(initialTextStream);
}
}
// Start streaming content. This will start the flow of content down to
// media source.
await this.streamingEngine_.start();
if (this.config_.abr.enabled) {
this.abrManager_.enable();
this.onAbrStatusChanged_();
}
// Re-filter the manifest after streams have been chosen.
this.filterManifestByCurrentVariant_();
// Dispatch a 'trackschanged' event now that all initial filtering is done.
this.onTracksChanged_();
// Now that we've filtered out variants that aren't compatible with the
// active one, update abr manager with filtered variants.
// NOTE: This may be unnecessary. We've already chosen one codec in
// chooseCodecsAndFilterManifest_ before we started streaming. But it
// doesn't hurt, and this will all change when we start using
// MediaCapabilities and codec switching.
// TODO(#1391): Re-evaluate with MediaCapabilities and codec switching.
this.updateAbrManagerVariants_();
const hasPrimary = this.manifest_.variants.some((v) => v.primary);
if (!this.config_.preferredAudioLanguage && !hasPrimary) {
shaka.log.warning('No preferred audio language set. We have chosen an ' +
'arbitrary language initially');
}
// Wait for the 'loadedmetadata' event to measure load() latency.
this.eventManager_.listenOnce(mediaElement, 'loadedmetadata', () => {
const now = Date.now() / 1000;
const delta = now - wants.startTimeOfLoad;
this.stats_.setLoadLatency(delta);
});
}
/**
* This should only be called by the load graph when it is time to initialize
* drmEngine for src= playbacks.
*
* The load-graph is responsible for ensuring all assumptions made by this
* method are valid before executing it.
*
* @param {shaka.routing.Payload} has
* @param {shaka.routing.Payload} wants
* @return {!Promise}
* @private
*/
async onInitializeSrcEqualsDrm_(has, wants) {
const ContentType = shaka.util.ManifestParserUtils.ContentType;
goog.asserts.assert(
this.networkingEngine_,
'|onInitializeSrcEqualsDrm_| should never be called after |destroy|');
goog.asserts.assert(
this.config_,
'|onInitializeSrcEqualsDrm_| should never be called after |destroy|');
const startTime = Date.now() / 1000;
let firstEvent = true;
this.drmEngine_ = this.createDrmEngine({
netEngine: this.networkingEngine_,
onError: (e) => {
this.onError_(e);
},
onKeyStatus: (map) => {
this.onKeyStatus_(map);
},
onExpirationUpdated: (id, expiration) => {
this.onExpirationUpdated_(id, expiration);
},
onEvent: (e) => {
this.dispatchEvent(e);
if (e.type == shaka.Player.EventName.DrmSessionUpdate && firstEvent) {
firstEvent = false;
const now = Date.now() / 1000;
const delta = now - startTime;
this.stats_.setDrmTime(delta);
}
},
});
this.drmEngine_.configure(this.config_.drm);
const uri = wants.uri || '';
const extension = shaka.media.ManifestParser.getExtension(uri);
let mimeType = shaka.Player.SRC_EQUAL_EXTENSIONS_TO_MIME_TYPES_[extension];
if (mimeType == 'application/x-mpegurl' && shaka.util.Platform.isApple()) {
mimeType = 'application/vnd.apple.mpegurl';
}
if (!mimeType) {
mimeType = 'video/mp4';
}
// TODO: Instead of feeding DrmEngine with Variants, we should refactor
// DrmEngine so that it takes a minimal config derived from Variants. In
// cases like this one or in removal of stored content, the details are
// largely unimportant. We should have a saner way to initialize DrmEngine.
// That would also insulate DrmEngine from manifest changes in the future.
// For now, that is time-consuming and this synthetic Variant is easy, so
// I'm putting it off. Since this is only expected to be used for native
// HLS in Safari, this should be safe. -JCP
/** @type {shaka.extern.Variant} */
const variant = {
id: 0,
language: 'und',
primary: false,
audio: null,
video: {
id: 0,
originalId: null,
createSegmentIndex: () => Promise.resolve(),
segmentIndex: null,
mimeType: wants.mimeType ?
shaka.util.MimeUtils.getBasicType(wants.mimeType) : mimeType,
codecs: wants.mimeType ?
shaka.util.MimeUtils.getCodecs(wants.mimeType) : '',
encrypted: true,
drmInfos: [], // Filled in by DrmEngine config.
keyIds: new Set(),
language: 'und',
label: null,
type: ContentType.VIDEO,
primary: false,
trickModeVideo: null,
emsgSchemeIdUris: null,
roles: [],
forced: false,
channelsCount: null,
audioSamplingRate: null,
spatialAudio: false,
closedCaptions: null,
},
bandwidth: 100,
allowedByApplication: true,
allowedByKeySystem: true,
decodingInfos: [],
};
this.drmEngine_.setSrcEquals(/* srcEquals= */ true);
await this.drmEngine_.initForPlayback(
[variant], /* offlineSessionIds= */ []);
await this.drmEngine_.attach(has.mediaElement);
}
/**
   * This should only be called by the load graph when it is time to set up the
   * media element to play content using src=. The only time this may be called
   * is when we are attached to the same media element as in the request.
*
   * This method assumes that it is safe for it to execute; the load-graph is
   * responsible for ensuring all assumptions are true.
*
* @param {shaka.routing.Payload} has
* @param {shaka.routing.Payload} wants
* @return {!shaka.util.AbortableOperation}
*
* @private
*/
onSrcEquals_(has, wants) {
goog.asserts.assert(
has.mediaElement,
'We should have a media element when loading.');
goog.asserts.assert(
wants.uri,
        '|wants| should have a valid uri when loading.');
goog.asserts.assert(
!isNaN(wants.startTimeOfLoad),
'|wants| should tell us when the load was originally requested');
goog.asserts.assert(
this.video_ == has.mediaElement,
'The video element should match our media element');
    // Lock in the values that we are using so that the routing logic knows
    // what we have.
has.uri = wants.uri;
has.startTime = wants.startTime;
// Save the uri so that it can be used outside of the load-graph.
this.assetUri_ = has.uri;
this.playhead_ = new shaka.media.SrcEqualsPlayhead(has.mediaElement);
if (has.startTime != null) {
this.playhead_.setStartTime(has.startTime);
}
this.playRateController_ = new shaka.media.PlayRateController({
getRate: () => has.mediaElement.playbackRate,
getDefaultRate: () => has.mediaElement.defaultPlaybackRate,
setRate: (rate) => { has.mediaElement.playbackRate = rate; },
movePlayhead: (delta) => { has.mediaElement.currentTime += delta; },
});
// We need to start the buffer management code near the end because it will
// set the initial buffering state and that depends on other components
// being initialized.
const rebufferThreshold = this.config_.streaming.rebufferingGoal;
this.startBufferManagement_(rebufferThreshold);
// Add all media element listeners.
const updateStateHistory = () => this.updateStateHistory_();
const onRateChange = () => this.onRateChange_();
this.eventManager_.listen(has.mediaElement, 'playing', updateStateHistory);
this.eventManager_.listen(has.mediaElement, 'pause', updateStateHistory);
this.eventManager_.listen(has.mediaElement, 'ended', updateStateHistory);
this.eventManager_.listen(has.mediaElement, 'ratechange', onRateChange);
// Wait for the 'loadedmetadata' event to measure load() latency, but only
// if preload is set in a way that would result in this event firing
// automatically. See https://github.com/google/shaka-player/issues/2483
if (this.video_.preload != 'none') {
this.eventManager_.listenOnce(this.video_, 'loadedmetadata', () => {
const now = Date.now() / 1000;
const delta = now - wants.startTimeOfLoad;
this.stats_.setLoadLatency(delta);
});
}
// The audio tracks are only available on Safari at the moment, but this
// drives the tracks API for Safari's native HLS. So when they change,
// fire the corresponding Shaka Player event.
if (this.video_.audioTracks) {
this.eventManager_.listen(
this.video_.audioTracks, 'addtrack', () => this.onTracksChanged_());
this.eventManager_.listen(
this.video_.audioTracks, 'removetrack',
() => this.onTracksChanged_());
this.eventManager_.listen(
this.video_.audioTracks, 'change', () => this.onTracksChanged_());
}
if (this.video_.textTracks) {
this.eventManager_.listen(this.video_.textTracks, 'addtrack', (e) => {
const trackEvent = /** @type {!TrackEvent} */(e);
if (trackEvent.track) {
const track = trackEvent.track;
goog.asserts.assert(track instanceof TextTrack, 'Wrong track type!');
switch (track.kind) {
case 'metadata':
this.processTimedMetadataSrcEqls_(track);
break;
case 'chapters':
this.processChaptersTrack_(track);
break;
default:
this.onTracksChanged_();
break;
}
}
});
this.eventManager_.listen(
this.video_.textTracks, 'removetrack', () => this.onTracksChanged_());
this.eventManager_.listen(
this.video_.textTracks, 'change', () => this.onTracksChanged_());
}
// By setting |src| we are done "loading" with src=. We don't need to set
// the current time because |playhead| will do that for us.
has.mediaElement.src = has.uri;
// Tizen 3 / WebOS won't load anything unless you call load() explicitly,
// no matter the value of the preload attribute. This is harmful on some
// other platforms by triggering unbounded loading of media data, but is
// necessary here.
if (shaka.util.Platform.isTizen() || shaka.util.Platform.isWebOS()) {
has.mediaElement.load();
}
// Set the load mode last so that we know that all our components are
// initialized.
this.loadMode_ = shaka.Player.LoadMode.SRC_EQUALS;
// The event doesn't mean as much for src= playback, since we don't control
// streaming. But we should fire it in this path anyway since some
// applications may be expecting it as a life-cycle event.
this.dispatchEvent(this.makeEvent_(shaka.Player.EventName.Streaming));
// The "load" Promise is resolved when we have loaded the metadata. If we
// wait for the full data, that won't happen on Safari until the play button
// is hit.
const fullyLoaded = new shaka.util.PublicPromise();
shaka.util.MediaReadyState.waitForReadyState(this.video_,
HTMLMediaElement.HAVE_METADATA,
this.eventManager_,
() => {
fullyLoaded.resolve();
});
// This flag is used below in the language preference setup to check if this
// load was canceled before the necessary events fire.
let unloaded = false;
this.cleanupOnUnload_.push(() => {
unloaded = true;
});
// We can't switch to preferred languages, though, until the data is loaded.
shaka.util.MediaReadyState.waitForReadyState(this.video_,
HTMLMediaElement.HAVE_CURRENT_DATA,
this.eventManager_,
async () => {
// If we have moved on to another piece of content while waiting for
// the above event, we should not change tracks here.
if (unloaded) {
return;
}
this.setupPreferredAudioOnSrc_();
// Applying the text preference too soon can result in it being
// reverted. Wait for native HLS to pick something first.
const textTracks = this.getFilteredTextTracks_();
if (!textTracks.find((t) => t.mode != 'disabled')) {
await new Promise((resolve) => {
this.eventManager_.listenOnce(
this.video_.textTracks, 'change', resolve);
// We expect the event to fire because it does on Safari.
// But in case it doesn't on some other platform or future
// version, move on in 1 second no matter what. This keeps the
// language settings from being completely ignored if something
// goes wrong.
new shaka.util.Timer(resolve).tickAfter(1);
});
}
// If we have moved on to another piece of content while waiting for
// the above event/timer, we should not change tracks here.
if (unloaded) {
return;
}
this.setupPreferredTextOnSrc_();
});
if (this.video_.error) {
// Already failed!
fullyLoaded.reject(this.videoErrorToShakaError_());
} else if (this.video_.preload == 'none') {
shaka.log.alwaysWarn(
'With <video preload="none">, the browser will not load anything ' +
'until play() is called. We are unable to measure load latency in ' +
'a meaningful way, and we cannot provide track info yet. Please do ' +
'not use preload="none" with Shaka Player.');
      // We can't wait for an event like loadedmetadata, since that will be
      // blocked until a user interaction. So resolve the Promise now.
fullyLoaded.resolve();
}
this.eventManager_.listenOnce(this.video_, 'error', () => {
fullyLoaded.reject(this.videoErrorToShakaError_());
});
return new shaka.util.AbortableOperation(fullyLoaded, /* onAbort= */ () => {
const abortedError = new shaka.util.Error(
shaka.util.Error.Severity.CRITICAL,
shaka.util.Error.Category.PLAYER,
shaka.util.Error.Code.OPERATION_ABORTED);
fullyLoaded.reject(abortedError);
return Promise.resolve(); // Abort complete.
});
}
/**
   * This method sets up the preferred audio language when playing via src=.
*
* @private
*/
setupPreferredAudioOnSrc_() {
const preferredAudioLanguage = this.config_.preferredAudioLanguage;
    // If the user has not selected a preference, the browser's default
    // preference is left in place.
if (preferredAudioLanguage == '') {
return;
}
this.selectAudioLanguage(preferredAudioLanguage);
const preferredVariantRole = this.config_.preferredVariantRole;
    // If the user has not selected a role preference, the previous
    // language-only match is kept.
if (preferredVariantRole == '') {
return;
}
this.selectAudioLanguage(preferredAudioLanguage, preferredVariantRole);
}
/**
   * This method sets up the preferred text language when playing via src=.
*
* @private
*/
setupPreferredTextOnSrc_() {
const preferredTextLanguage = this.config_.preferredTextLanguage;
const preferForcedSubs = this.config_.preferForcedSubs;
    // If the user has not selected a preference, the browser's default
    // preference is left in place.
if (preferredTextLanguage == '') {
return;
}
this.selectTextLanguage(preferredTextLanguage, '', preferForcedSubs);
const preferredTextRole = this.config_.preferredTextRole;
    // If the user has not selected a role preference, the previous
    // language-only match is kept.
if (preferredTextRole == '') {
return;
}
this.selectTextLanguage(preferredTextLanguage, preferredTextRole,
preferForcedSubs);
}
/**
   * We're looking for metadata tracks to process ID3 tags. One of the uses is
   * for ad info on LIVE streams.
*
* @param {!TextTrack} track
* @private
*/
processTimedMetadataSrcEqls_(track) {
if (track.kind != 'metadata') {
return;
}
    // Hidden mode is required for the cuechange event to fire correctly.
track.mode = 'hidden';
this.eventManager_.listen(track, 'cuechange', () => {
if (!track.activeCues) {
return;
}
for (const cue of track.activeCues) {
this.dispatchMetadataEvent_(cue.startTime, cue.endTime,
cue.type, cue.value);
if (this.adManager_) {
this.adManager_.onCueMetadataChange(cue.value);
}
}
});
    // In Safari the initial assignment does not always work, so we repeat the
    // assignment shortly afterwards to ensure that the track has been put in
    // the correct mode.
new shaka.util.Timer(() => {
const textTracks = this.getMetadataTracks_();
for (const textTrack of textTracks) {
textTrack.mode = 'hidden';
}
}).tickNow().tickAfter(/* seconds= */ 0.5);
}
/**
* @param {!Array.<shaka.extern.ID3Metadata>} metadata
* @param {number} offset
* @param {?number} segmentEndTime
* @private
*/
processTimedMetadataMediaSrc_(metadata, offset, segmentEndTime) {
for (const sample of metadata) {
if (sample['data'] && sample['cueTime'] && sample['frames']) {
const start = sample['cueTime'] + offset;
const end = segmentEndTime;
const metadataType = 'ID3';
for (const frame of sample['frames']) {
const payload = frame;
this.dispatchMetadataEvent_(start, end, metadataType, payload);
}
if (this.adManager_) {
this.adManager_.onHlsTimedMetadata(sample, start);
}
}
}
}
/**
* Construct and fire a Player.Metadata event
*
* @param {number} startTime
* @param {?number} endTime
* @param {string} metadataType
* @param {shaka.extern.ID3Metadata} payload
* @private
*/
dispatchMetadataEvent_(startTime, endTime, metadataType, payload) {
goog.asserts.assert(!endTime || startTime <= endTime,
        'Metadata start time should be less than or equal to the end time!');
const eventName = shaka.Player.EventName.Metadata;
const data = {
startTime: startTime,
endTime: endTime,
metadataType: metadataType,
payload: payload,
};
this.dispatchEvent(this.makeEvent_(eventName, data));
}
/**
* We're looking for chapters tracks to process the chapters.
*
* @param {?TextTrack} track
* @private
*/
processChaptersTrack_(track) {
if (!track || track.kind != 'chapters') {
return;
}
    // Hidden mode is required for the cuechange event to fire correctly and
    // for the cues and activeCues to be populated.
track.mode = 'hidden';
    // In Safari the initial assignment does not always work, so we repeat the
    // assignment shortly afterwards to ensure that the track has been put in
    // the correct mode.
new shaka.util.Timer(() => {
const chaptersTracks = this.getChaptersTracks_();
for (const chaptersTrack of chaptersTracks) {
chaptersTrack.mode = 'hidden';
}
}).tickNow().tickAfter(/* seconds= */ 0.5);
}
/**
* Take a series of variants and ensure that they only contain one type of
* variant. The different options are:
* 1. Audio-Video
* 2. Audio-Only
* 3. Video-Only
*
* A manifest can only contain a single type because once we initialize media
* source to expect specific streams, it must always have content for those
* streams. If we were to start with audio+video and switch to an audio-only
* variant, media source would block waiting for video content.
*
* @param {shaka.extern.Manifest} manifest
* @private
*/
static filterForAVVariants_(manifest) {
const isAVVariant = (variant) => {
// Audio-video variants may include both streams separately or may be
// single multiplexed streams with multiple codecs.
return (variant.video && variant.audio) ||
(variant.video && variant.video.codecs.includes(','));
};
if (manifest.variants.some(isAVVariant)) {
shaka.log.debug('Found variant with audio and video content, ' +
'so filtering out audio-only content.');
manifest.variants = manifest.variants.filter(isAVVariant);
}
}
/**
* Create a new DrmEngine instance. This may be replaced by tests to create
* fake instances. Configuration and initialization will be handled after
* |createDrmEngine|.
*
* @param {shaka.media.DrmEngine.PlayerInterface} playerInterface
* @return {!shaka.media.DrmEngine}
*/
createDrmEngine(playerInterface) {
const updateExpirationTime = this.config_.drm.updateExpirationTime;
return new shaka.media.DrmEngine(playerInterface, updateExpirationTime);
}
/**
* Creates a new instance of NetworkingEngine. This can be replaced by tests
* to create fake instances instead.
*
* @return {!shaka.net.NetworkingEngine}
*/
createNetworkingEngine() {
/** @type {function(number, number)} */
const onProgressUpdated_ = (deltaTimeMs, bytesDownloaded) => {
// In some situations, such as during offline storage, the abr manager
// might not yet exist. Therefore, we need to check if abr manager has
// been initialized before using it.
if (this.abrManager_) {
this.abrManager_.segmentDownloaded(deltaTimeMs, bytesDownloaded);
}
};
/** @type {shaka.net.NetworkingEngine.OnHeadersReceived} */
const onHeadersReceived_ = (headers, request, requestType) => {
// Release a 'downloadheadersreceived' event.
const name = shaka.Player.EventName.DownloadHeadersReceived;
const data = {
headers,
request,
requestType,
};
this.dispatchEvent(this.makeEvent_(name, data));
};
/** @type {shaka.net.NetworkingEngine.OnDownloadFailed} */
const onDownloadFailed_ = (request, error, httpResponseCode, aborted) => {
// Release a 'downloadfailed' event.
const name = shaka.Player.EventName.DownloadFailed;
const data = {
request,
error,
httpResponseCode,
aborted,
};
this.dispatchEvent(this.makeEvent_(name, data));
};
return new shaka.net.NetworkingEngine(
onProgressUpdated_, onHeadersReceived_, onDownloadFailed_);
}
/**
* Creates a new instance of Playhead. This can be replaced by tests to
* create fake instances instead.
*
* @param {?number} startTime
* @return {!shaka.media.Playhead}
*/
createPlayhead(startTime) {
goog.asserts.assert(this.manifest_, 'Must have manifest');
goog.asserts.assert(this.video_, 'Must have video');
return new shaka.media.MediaSourcePlayhead(
this.video_,
this.manifest_,
this.config_.streaming,
startTime,
() => this.onSeek_(),
(event) => this.dispatchEvent(event));
}
/**
* Create the observers for MSE playback. These observers are responsible for
* notifying the app and player of specific events during MSE playback.
*
* @return {!shaka.media.PlayheadObserverManager}
* @private
*/
createPlayheadObserversForMSE_() {
goog.asserts.assert(this.manifest_, 'Must have manifest');
goog.asserts.assert(this.regionTimeline_, 'Must have region timeline');
goog.asserts.assert(this.video_, 'Must have video element');
// Create the region observer. This will allow us to notify the app when we
// move in and out of timeline regions.
const regionObserver = new shaka.media.RegionObserver(this.regionTimeline_);
const onEnterRegion = (region, seeking) => {
this.onRegionEvent_(shaka.Player.EventName.TimelineRegionEnter, region);
};
const onExitRegion = (region, seeking) => {
this.onRegionEvent_(shaka.Player.EventName.TimelineRegionExit, region);
};
const onSkipRegion = (region, seeking) => {
      // If we are seeking, we don't want to surface the enter/exit events
      // since playback did not pass through the region.
if (!seeking) {
this.onRegionEvent_(shaka.Player.EventName.TimelineRegionEnter, region);
this.onRegionEvent_(shaka.Player.EventName.TimelineRegionExit, region);
}
};
regionObserver.setListeners(onEnterRegion, onExitRegion, onSkipRegion);
// Now that we have all our observers, create a manager for them.
const manager = new shaka.media.PlayheadObserverManager(this.video_);
manager.manage(regionObserver);
return manager;
}
/**
* Initialize and start the buffering system (observer and timer) so that we
* can monitor our buffer lead during playback.
*
* @param {number} rebufferingGoal
* @private
*/
startBufferManagement_(rebufferingGoal) {
goog.asserts.assert(
!this.bufferObserver_,
'No buffering observer should exist before initialization.');
goog.asserts.assert(
!this.bufferPoller_,
'No buffer timer should exist before initialization.');
    // Give dummy values; they will be updated below.
this.bufferObserver_ = new shaka.media.BufferingObserver(1, 2);
    // Force us back to a buffering state. This ensures everything starts in
    // the same state.
this.bufferObserver_.setState(shaka.media.BufferingObserver.State.STARVING);
this.updateBufferingSettings_(rebufferingGoal);
this.updateBufferState_();
// TODO: We should take some time to look into the effects of our
// quarter-second refresh practice. We often use a quarter-second
// but we have no documentation about why.
this.bufferPoller_ = new shaka.util.Timer(() => {
this.pollBufferState_();
}).tickEvery(/* seconds= */ 0.25);
}
/**
* Updates the buffering thresholds based on the new rebuffering goal.
*
* @param {number} rebufferingGoal
* @private
*/
updateBufferingSettings_(rebufferingGoal) {
// The threshold to transition back to satisfied when starving.
const starvingThreshold = rebufferingGoal;
    // The threshold to transition into starving when satisfied.
    // We use a "typical" threshold, unless the rebufferingGoal is unusually
    // low. Then we force the value down to half the rebufferingGoal, since
    // starvingThreshold must be strictly larger than satisfiedThreshold for
    // the logic in BufferingObserver to work correctly.
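    // For example, a rebufferingGoal of 0.2 yields a satisfiedThreshold of at
    // most 0.1, keeping it strictly below the starvingThreshold of 0.2.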
const satisfiedThreshold = Math.min(
shaka.Player.TYPICAL_BUFFERING_THRESHOLD_, rebufferingGoal / 2);
this.bufferObserver_.setThresholds(starvingThreshold, satisfiedThreshold);
}
/**
* This method is called periodically to check what the buffering observer
   * says so that we can update the rest of the buffering behaviors.
*
* @private
*/
pollBufferState_() {
goog.asserts.assert(
this.video_,
'Need a media element to update the buffering observer');
goog.asserts.assert(
this.bufferObserver_,
'Need a buffering observer to update');
let bufferedToEnd;
switch (this.loadMode_) {
case shaka.Player.LoadMode.SRC_EQUALS:
bufferedToEnd = this.isBufferedToEndSrc_();
break;
case shaka.Player.LoadMode.MEDIA_SOURCE:
bufferedToEnd = this.isBufferedToEndMS_();
break;
default:
bufferedToEnd = false;
break;
}
const bufferLead = shaka.media.TimeRangesUtils.bufferedAheadOf(
this.video_.buffered,
this.video_.currentTime);
const stateChanged = this.bufferObserver_.update(bufferLead, bufferedToEnd);
// If the state changed, we need to surface the event.
if (stateChanged) {
this.updateBufferState_();
}
}
/**
* Create a new media source engine. This will ONLY be replaced by tests as a
* way to inject fake media source engine instances.
*
* @param {!HTMLMediaElement} mediaElement
* @param {!shaka.media.IClosedCaptionParser} closedCaptionsParser
* @param {!shaka.extern.TextDisplayer} textDisplayer
* @param {!function(!Array.<shaka.extern.ID3Metadata>, number, ?number)}
* onMetadata
*
* @return {!shaka.media.MediaSourceEngine}
*/
createMediaSourceEngine(mediaElement, closedCaptionsParser, textDisplayer,
onMetadata) {
return new shaka.media.MediaSourceEngine(
mediaElement, closedCaptionsParser, textDisplayer, onMetadata);
}
/**
* Creates a new instance of StreamingEngine. This can be replaced by tests
* to create fake instances instead.
*
* @return {!shaka.media.StreamingEngine}
*/
createStreamingEngine() {
goog.asserts.assert(
this.playhead_ && this.abrManager_ && this.mediaSourceEngine_ &&
this.manifest_,
'Must not be destroyed');
/** @type {shaka.media.StreamingEngine.PlayerInterface} */
const playerInterface = {
getPresentationTime: () => this.playhead_.getTime(),
getBandwidthEstimate: () => this.abrManager_.getBandwidthEstimate(),
mediaSourceEngine: this.mediaSourceEngine_,
netEngine: this.networkingEngine_,
onError: (error) => this.onError_(error),
onEvent: (event) => this.dispatchEvent(event),
onManifestUpdate: () => this.onManifestUpdate_(),
onSegmentAppended: () => this.onSegmentAppended_(),
};
return new shaka.media.StreamingEngine(this.manifest_, playerInterface);
}
/**
* Changes configuration settings on the Player. This checks the names of
* keys and the types of values to avoid coding errors. If there are errors,
* this logs them to the console and returns false. Correct fields are still
* applied even if there are other errors. You can pass an explicit
* <code>undefined</code> value to restore the default value. This has two
* modes of operation:
*
* <p>
* First, this can be passed a single "plain" object. This object should
* follow the {@link shaka.extern.PlayerConfiguration} object. Not all fields
* need to be set; unset fields retain their old values.
*
* <p>
* Second, this can be passed two arguments. The first is the name of the key
* to set. This should be a '.' separated path to the key. For example,
* <code>'streaming.alwaysStreamText'</code>. The second argument is the
* value to set.
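   *
   * <p>
   * A minimal sketch of both modes (assuming <code>player</code> is a
   * constructed shaka.Player instance):
   * <pre>
   *   // Key-path mode:
   *   player.configure('streaming.bufferingGoal', 30);
   *
   *   // Object mode; unset fields keep their previous values:
   *   player.configure({
   *     streaming: {
   *       bufferingGoal: 30,
   *     },
   *   });
   * </pre>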
*
* @param {string|!Object} config This should either be a field name or an
* object.
* @param {*=} value In the second mode, this is the value to set.
* @return {boolean} True if the passed config object was valid, false if
* there were invalid entries.
* @export
*/
configure(config, value) {
goog.asserts.assert(this.config_, 'Config must not be null!');
goog.asserts.assert(typeof(config) == 'object' || arguments.length == 2,
'String configs should have values!');
// ('fieldName', value) format
if (arguments.length == 2 && typeof(config) == 'string') {
config = shaka.util.ConfigUtils.convertToConfigObject(config, value);
}
goog.asserts.assert(typeof(config) == 'object', 'Should be an object!');
// Deprecate 'manifest.dash.defaultPresentationDelay' configuration.
if (config['manifest'] && config['manifest']['dash'] &&
'defaultPresentationDelay' in config['manifest']['dash']) {
shaka.Deprecate.deprecateFeature(4,
'manifest.dash.defaultPresentationDelay configuration',
          'Please use manifest.defaultPresentationDelay instead.');
config['manifest']['defaultPresentationDelay'] =
config['manifest']['dash']['defaultPresentationDelay'];
delete config['manifest']['dash']['defaultPresentationDelay'];
}
// If lowLatencyMode is enabled, and inaccurateManifestTolerance and
// rebufferingGoal are not specified, set inaccurateManifestTolerance to 0
// and rebufferingGoal to 0.01 by default for low latency streaming.
if (config['streaming'] && config['streaming']['lowLatencyMode']) {
if (config['streaming']['inaccurateManifestTolerance'] == undefined) {
config['streaming']['inaccurateManifestTolerance'] = 0;
}
if (config['streaming']['rebufferingGoal'] == undefined) {
config['streaming']['rebufferingGoal'] = 0.01;
}
}
const ret = shaka.util.PlayerConfiguration.mergeConfigObjects(
this.config_, config, this.defaultConfig_());
this.applyConfig_();
return ret;
}
/**
* Apply config changes.
* @private
*/
applyConfig_() {
if (this.parser_) {
const manifestConfig =
shaka.util.ObjectUtils.cloneObject(this.config_.manifest);
// Don't read video segments if the player is attached to an audio element
if (this.video_ && this.video_.nodeName === 'AUDIO') {
manifestConfig.disableVideo = true;
}
this.parser_.configure(manifestConfig);
}
if (this.drmEngine_) {
this.drmEngine_.configure(this.config_.drm);
}
if (this.streamingEngine_) {
this.streamingEngine_.configure(this.config_.streaming);
// Need to apply the restrictions.
try {
// this.filterManifestWithRestrictions_() may throw.
this.filterManifestWithRestrictions_(this.manifest_);
} catch (error) {
this.onError_(error);
}
if (this.abrManager_) {
// Update AbrManager variants to match these new settings.
this.updateAbrManagerVariants_();
}
// If the streams we are playing are restricted, we need to switch.
const activeVariant = this.streamingEngine_.getCurrentVariant();
if (activeVariant) {
if (!activeVariant.allowedByApplication ||
!activeVariant.allowedByKeySystem) {
shaka.log.debug('Choosing new variant after changing configuration');
this.chooseVariantAndSwitch_();
}
}
}
if (this.networkingEngine_) {
this.networkingEngine_.setForceHTTPS(this.config_.streaming.forceHTTPS);
}
if (this.mediaSourceEngine_) {
const textDisplayerFactory = this.config_.textDisplayFactory;
if (this.lastTextFactory_ != textDisplayerFactory) {
const displayer =
shaka.util.Functional.callFactory(textDisplayerFactory);
this.mediaSourceEngine_.setTextDisplayer(displayer);
this.lastTextFactory_ = textDisplayerFactory;
if (this.streamingEngine_) {
// Reload the text stream, so the cues will load again.
this.streamingEngine_.reloadTextStream();
}
}
}
if (this.abrManager_) {
this.abrManager_.configure(this.config_.abr);
// Simply enable/disable ABR with each call, since multiple calls to these
// methods have no effect.
if (this.config_.abr.enabled) {
this.abrManager_.enable();
} else {
this.abrManager_.disable();
}
this.onAbrStatusChanged_();
}
if (this.bufferObserver_) {
let rebufferThreshold = this.config_.streaming.rebufferingGoal;
if (this.manifest_) {
rebufferThreshold =
Math.max(rebufferThreshold, this.manifest_.minBufferTime);
}
this.updateBufferingSettings_(rebufferThreshold);
}
if (this.manifest_) {
shaka.Player.applyPlayRange_(this.manifest_.presentationTimeline,
this.config_.playRangeStart,
this.config_.playRangeEnd);
}
}
/**
* Return a copy of the current configuration. Modifications of the returned
* value will not affect the Player's active configuration. You must call
* <code>player.configure()</code> to make changes.
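   *
   * <p>
   * A sketch of the copy-modify-apply pattern:
   * <pre>
   *   const config = player.getConfiguration();
   *   config.streaming.bufferingGoal = 60;
   *   player.configure(config);
   * </pre>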
*
* @return {shaka.extern.PlayerConfiguration}
* @export
*/
getConfiguration() {
goog.asserts.assert(this.config_, 'Config must not be null!');
const ret = this.defaultConfig_();
shaka.util.PlayerConfiguration.mergeConfigObjects(
ret, this.config_, this.defaultConfig_());
return ret;
}
/**
* Return a reference to the current configuration. Modifications to the
* returned value will affect the Player's active configuration. This method
* is not exported as sharing configuration with external objects is not
* supported.
*
* @return {shaka.extern.PlayerConfiguration}
*/
getSharedConfiguration() {
goog.asserts.assert(
this.config_, 'Cannot call getSharedConfiguration after call destroy!');
return this.config_;
}
/**
   * Returns the ratio of video length buffered compared to the buffering goal.
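   *
   * For example, with a bufferingGoal of 10 and 5 seconds buffered ahead of
   * the playhead, this returns 0.5. The result is clamped to [0, 1].
   *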
* @return {number}
* @export
*/
getBufferFullness() {
if (this.video_) {
const bufferedLength = this.video_.buffered.length;
const bufferedEnd =
bufferedLength ? this.video_.buffered.end(bufferedLength - 1) : 0;
const bufferingGoal = this.getConfiguration().streaming.bufferingGoal;
const lengthToBeBuffered = Math.min(this.video_.currentTime +
bufferingGoal, this.seekRange().end);
if (bufferedEnd >= lengthToBeBuffered) {
return 1;
} else if (bufferedEnd <= this.video_.currentTime) {
return 0;
} else if (bufferedEnd < lengthToBeBuffered) {
return ((bufferedEnd - this.video_.currentTime) /
(lengthToBeBuffered - this.video_.currentTime));
}
}
return 0;
}
/**
* Reset configuration to default.
* @export
*/
resetConfiguration() {
goog.asserts.assert(this.config_, 'Cannot be destroyed');
    // Remove the old keys so that we remove open-ended dictionaries like
    // drm.servers but keep the same object reference.
for (const key in this.config_) {
delete this.config_[key];
}
shaka.util.PlayerConfiguration.mergeConfigObjects(
this.config_, this.defaultConfig_(), this.defaultConfig_());
this.applyConfig_();
}
/**
* Get the current load mode.
*
* @return {shaka.Player.LoadMode}
* @export
*/
getLoadMode() {
return this.loadMode_;
}
/**
* Get the media element that the player is currently using to play loaded
* content. If the player has not loaded content, this will return
* <code>null</code>.
*
* @return {HTMLMediaElement}
* @export
*/
getMediaElement() {
return this.video_;
}
/**
* @return {shaka.net.NetworkingEngine} A reference to the Player's networking
* engine. Applications may use this to make requests through Shaka's
* networking plugins.
* @export
*/
getNetworkingEngine() {
return this.networkingEngine_;
}
/**
* Get the uri to the asset that the player has loaded. If the player has not
* loaded content, this will return <code>null</code>.
*
* @return {?string}
* @export
*/
getAssetUri() {
return this.assetUri_;
}
/**
* Returns a shaka.ads.AdManager instance, responsible for Dynamic
* Ad Insertion functionality.
*
* @return {shaka.extern.IAdManager}
* @export
*/
getAdManager() {
// NOTE: this clause is redundant, but it keeps the compiler from
// inlining this function. Inlining leads to setting the adManager
// not taking effect in the compiled build.
// Closure has a @noinline flag, but apparently not all cases are
// supported by it, and ours isn't.
// If they expand support, we might be able to get rid of this
// clause.
if (!this.adManager_) {
return null;
}
return this.adManager_;
}
/**
* Get if the player is playing live content. If the player has not loaded
* content, this will return <code>false</code>.
*
* @return {boolean}
* @export
*/
isLive() {
if (this.manifest_) {
return this.manifest_.presentationTimeline.isLive();
}
// For native HLS, the duration for live streams seems to be Infinity.
if (this.video_ && this.video_.src) {
return this.video_.duration == Infinity;
}
return false;
}
/**
* Get if the player is playing in-progress content. If the player has not
* loaded content, this will return <code>false</code>.
*
* @return {boolean}
* @export
*/
isInProgress() {
return this.manifest_ ?
this.manifest_.presentationTimeline.isInProgress() :
false;
}
/**
* Check if the manifest contains only audio-only content. If the player has
* not loaded content, this will return <code>false</code>.
*
* <p>
   * The player does not support content that contains more than one type of
   * variant (i.e. mixing audio-only, video-only, audio-video). Content will be
* filtered to only contain one type of variant.
*
* @return {boolean}
* @export
*/
isAudioOnly() {
if (this.manifest_) {
const variants = this.manifest_.variants;
if (!variants.length) {
return false;
}
// Note that if there are some audio-only variants and some audio-video
// variants, the audio-only variants are removed during filtering.
// Therefore if the first variant has no video, that's sufficient to say
// it is audio-only content.
return !variants[0].video;
} else if (this.video_ && this.video_.src) {
// If we have video track info, use that. It will be the least
// error-prone way with native HLS. In contrast, videoHeight might be
// unset until the first frame is loaded. Since isAudioOnly is queried
// by the UI on the 'trackschanged' event, the videoTracks info should be
// up-to-date.
if (this.video_.videoTracks) {
return this.video_.videoTracks.length == 0;
}
// We cast to the more specific HTMLVideoElement to access videoHeight.
// This might be an audio element, though, in which case videoHeight will
// be undefined at runtime. For audio elements, this will always return
// true.
const video = /** @type {HTMLVideoElement} */(this.video_);
return video.videoHeight == 0;
} else {
return false;
}
}
/**
* Return the value of lowLatencyMode configuration.
* @return {boolean}
* @private
*/
isLowLatencyMode_() {
return this.config_.streaming.lowLatencyMode;
}
/**
* Return the value of autoLowLatencyMode configuration.
* @return {boolean}
* @private
*/
isAutoLowLatencyMode_() {
return this.config_.streaming.autoLowLatencyMode;
}
/**
* Get the range of time (in seconds) that seeking is allowed. If the player
* has not loaded content, this will return a range from 0 to 0.
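   *
   * For example (a sketch):
   * <pre>
   *   const range = player.seekRange();
   *   const seekableSeconds = range.end - range.start;
   * </pre>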
*
* @return {{start: number, end: number}}
* @export
*/
seekRange() {
if (this.manifest_) {
const timeline = this.manifest_.presentationTimeline;
return {
'start': timeline.getSeekRangeStart(),
'end': timeline.getSeekRangeEnd(),
};
}
// If we have loaded content with src=, we ask the video element for its
// seekable range. This covers both plain mp4s and native HLS playbacks.
if (this.video_ && this.video_.src) {
const seekable = this.video_.seekable;
if (seekable.length) {
return {
'start': seekable.start(0),
'end': seekable.end(seekable.length - 1),
};
}
}
return {'start': 0, 'end': 0};
}
/**
* Go to live in a live stream.
*
* @export
*/
goToLive() {
if (this.isLive()) {
this.video_.currentTime = this.seekRange().end;
} else {
shaka.log.warning('goToLive is for live streams!');
}
}
/**
* Get the key system currently used by EME. If EME is not being used, this
* will return an empty string. If the player has not loaded content, this
* will return an empty string.
*
* @return {string}
* @export
*/
keySystem() {
return shaka.media.DrmEngine.keySystem(this.drmInfo());
}
/**
* Get the drm info used to initialize EME. If EME is not being used, this
* will return <code>null</code>. If the player is idle or has not initialized
* EME yet, this will return <code>null</code>.
*
* @return {?shaka.extern.DrmInfo}
* @export
*/
drmInfo() {
return this.drmEngine_ ? this.drmEngine_.getDrmInfo() : null;
}
/**
* Get the drm engine.
* This method should only be used for testing. Applications SHOULD NOT
* use this in production.
*
* @return {?shaka.media.DrmEngine}
*/
getDrmEngine() {
return this.drmEngine_;
}
/**
* Get the next known expiration time for any EME session. If the session
* never expires, this will return <code>Infinity</code>. If there are no EME
* sessions, this will return <code>Infinity</code>. If the player has not
* loaded content, this will return <code>Infinity</code>.
*
* @return {number}
* @export
*/
getExpiration() {
return this.drmEngine_ ? this.drmEngine_.getExpiration() : Infinity;
}
/**
* Gets a map of EME key ID to the current key status.
*
* @return {!Object<string, string>}
* @export
*/
getKeyStatuses() {
return this.drmEngine_ ? this.drmEngine_.getKeyStatuses() : {};
}
/**
* Check if the player is currently in a buffering state (has too little
* content to play smoothly). If the player has not loaded content, this will
* return <code>false</code>.
*
* @return {boolean}
* @export
*/
isBuffering() {
const State = shaka.media.BufferingObserver.State;
return this.bufferObserver_ ?
this.bufferObserver_.getState() == State.STARVING :
false;
}
/**
* Get the playback rate of what is playing right now. If we are using trick
* play, this will return the trick play rate.
* If no content is playing, this will return 0.
* If content is buffering, this will return the expected playback rate once
* the video starts playing.
*
* <p>
* If the player has not loaded content, this will return a playback rate of
* 0.
*
* @return {number}
* @export
*/
getPlaybackRate() {
if (!this.video_) {
return 0;
}
return this.playRateController_ ?
this.playRateController_.getRealRate() :
1;
}
/**
* Enable trick play to skip through content without playing by repeatedly
* seeking. For example, a rate of 2.5 would result in 2.5 seconds of content
* being skipped every second. A negative rate will result in moving
* backwards.
*
* <p>
   * If the player has not loaded content or is still loading content, this
   * will be a no-op. Wait until <code>load</code> has completed before calling.
*
* <p>
* Trick play will be canceled automatically if the playhead hits the
* beginning or end of the seekable range for the content.
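   *
   * <p>
   * A usage sketch (assuming <code>load</code> has completed):
   * <pre>
   *   player.trickPlay(2.5);     // Skip forward at 2.5x.
   *   player.cancelTrickPlay();  // Return to normal playback.
   * </pre>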
*
* @param {number} rate
* @export
*/
trickPlay(rate) {
// A playbackRate of 0 is used internally when we are in a buffering state,
// and doesn't make sense for trick play. If you set a rate of 0 for trick
// play, we will reject it and issue a warning. If it happens during a
// test, we will fail the test through this assertion.
goog.asserts.assert(rate != 0, 'Should never set a trick play rate of 0!');
if (rate == 0) {
shaka.log.alwaysWarn('A trick play rate of 0 is unsupported!');
return;
}
if (this.video_.paused) {
// Our fast forward is implemented with playbackRate and needs the video
// to be playing (to not be paused) to take immediate effect.
// If the video is paused, "unpause" it.
this.video_.play();
}
this.playRateController_.set(rate);
if (this.loadMode_ == shaka.Player.LoadMode.MEDIA_SOURCE) {
this.abrManager_.playbackRateChanged(rate);
this.streamingEngine_.setTrickPlay(Math.abs(rate) > 1);
}
}
/**
* Cancel trick-play. If the player has not loaded content or is still loading
   * content, this will be a no-op.
*
* @export
*/
cancelTrickPlay() {
const defaultPlaybackRate = this.playRateController_.getDefaultRate();
if (this.loadMode_ == shaka.Player.LoadMode.SRC_EQUALS) {
this.playRateController_.set(defaultPlaybackRate);
}
if (this.loadMode_ == shaka.Player.LoadMode.MEDIA_SOURCE) {
this.playRateController_.set(defaultPlaybackRate);
this.abrManager_.playbackRateChanged(defaultPlaybackRate);
this.streamingEngine_.setTrickPlay(false);
}
}
/**
* Return a list of variant tracks that can be switched to.
*
* <p>
* If the player has not loaded content, this will return an empty list.
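   *
   * <p>
   * For example, to find the currently-active track (a sketch):
   * <pre>
   *   const active = player.getVariantTracks().find((t) => t.active);
   * </pre>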
*
* @return {!Array.<shaka.extern.Track>}
* @export
*/
getVariantTracks() {
if (this.manifest_) {
const currentVariant = this.streamingEngine_ ?
this.streamingEngine_.getCurrentVariant() : null;
const tracks = [];
// Convert each variant to a track.
for (const variant of this.manifest_.variants) {
if (!shaka.util.StreamUtils.isPlayable(variant)) {
continue;
}
const track = shaka.util.StreamUtils.variantToTrack(variant);
track.active = variant == currentVariant;
tracks.push(track);
}
return tracks;
} else if (this.video_ && this.video_.audioTracks) {
// Safari's native HLS always shows a single element in videoTracks.
// You can't use that API to change resolutions. But we can use
// audioTracks to generate a variant list that is usable for changing
// languages.
const audioTracks = Array.from(this.video_.audioTracks);
return audioTracks.map((audio) =>
shaka.util.StreamUtils.html5AudioTrackToTrack(audio));
} else {
return [];
}
}
/**
* Return a list of text tracks that can be switched to.
*
* <p>
* If the player has not loaded content, this will return an empty list.
*
* @return {!Array.<shaka.extern.Track>}
* @export
*/
getTextTracks() {
if (this.manifest_) {
const currentTextStream = this.streamingEngine_ ?
this.streamingEngine_.getCurrentTextStream() : null;
const tracks = [];
// Convert all selectable text streams to tracks.
for (const text of this.manifest_.textStreams) {
const track = shaka.util.StreamUtils.textStreamToTrack(text);
track.active = text == currentTextStream;
tracks.push(track);
}
return tracks;
} else if (this.video_ && this.video_.src && this.video_.textTracks) {
const textTracks = this.getFilteredTextTracks_();
const StreamUtils = shaka.util.StreamUtils;
return textTracks.map((text) => StreamUtils.html5TextTrackToTrack(text));
} else {
return [];
}
}
/**
* Return a list of image tracks that can be switched to.
*
* If the player has not loaded content, this will return an empty list.
*
* @return {!Array.<shaka.extern.Track>}
* @export
*/
getImageTracks() {
if (this.manifest_) {
const imageStreams = this.manifest_.imageStreams;
const StreamUtils = shaka.util.StreamUtils;
return imageStreams.map((image) => StreamUtils.imageStreamToTrack(image));
} else {
return [];
}
}
/**
   * Return a Thumbnail object from an image track id and a time.
   *
   * If the player has not loaded content, this will return null.
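   *
   * A usage sketch (assuming the content has at least one image track):
   * <pre>
   *   const imageTrack = player.getImageTracks()[0];
   *   if (imageTrack) {
   *     const thumbnail = await player.getThumbnails(imageTrack.id, 30);
   *   }
   * </pre>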
*
* @param {number} trackId
* @param {number} time
* @return {!Promise.<?shaka.extern.Thumbnail>}
* @export
*/
async getThumbnails(trackId, time) {
if (this.manifest_) {
const imageStream = this.manifest_.imageStreams.find(
(stream) => stream.id == trackId);
if (!imageStream) {
return null;
}
if (!imageStream.segmentIndex) {
await imageStream.createSegmentIndex();
}
const referencePosition = imageStream.segmentIndex.find(time);
if (referencePosition == null) {
return null;
}
const reference = imageStream.segmentIndex.get(referencePosition);
const tilesLayout =
reference.getTilesLayout() || imageStream.tilesLayout;
      // This expression detects one or more digits (0-9), followed by an 'x',
      // followed by one or more digits (e.g. '5x1').
const match = /(\d+)x(\d+)/.exec(tilesLayout);
if (!match) {
        shaka.log.warning('Tiles layout does not contain a valid format' +
            ' (columns x rows)');
return null;
}
const fullImageWidth = imageStream.width || 0;
const fullImageHeight = imageStream.height || 0;
const columns = parseInt(match[1], 10);
const rows = parseInt(match[2], 10);
const width = fullImageWidth / columns;
const height = fullImageHeight / rows;
const totalImages = columns * rows;
const segmentDuration = reference.trueEndTime - reference.startTime;
const thumbnailDuration = segmentDuration / totalImages;
let thumbnailTime = reference.startTime;
let positionX = 0;
let positionY = 0;
// If the number of images in the segment is greater than 1, we have to
      // find the correct image. For that, we return to the app the
      // coordinates of the correct image within the full tiled image.
// Image search is always from left to right and top to bottom.
// Note: The time between images within the segment is always
// equidistant.
//
// Eg: Total images 5, tileLayout 5x1, segmentDuration 5, thumbnailTime 2
// positionX = 0.4 * fullImageWidth
// positionY = 0
if (totalImages > 1) {
const thumbnailPosition =
Math.floor((time - reference.startTime) / thumbnailDuration);
thumbnailTime = reference.startTime +
(thumbnailPosition * thumbnailDuration);
positionX = (thumbnailPosition % columns) * width;
positionY = Math.floor(thumbnailPosition / columns) * height;
}
return {
height: height,
positionX: positionX,
positionY: positionY,
startTime: thumbnailTime,
duration: thumbnailDuration,
uris: reference.getUris(),
width: width,
};
}
return null;
}
/**
* Select a specific text track. <code>track</code> should come from a call to
* <code>getTextTracks</code>. If the track is not found, this will be a
* no-op. If the player has not loaded content, this will be a no-op.
*
* <p>
* Note that <code>AdaptationEvents</code> are not fired for manual track
* selections.
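   *
   * <p>
   * A usage sketch (the English-language filter is illustrative):
   * <pre>
   *   const track = player.getTextTracks().find((t) => t.language == 'en');
   *   if (track) {
   *     player.selectTextTrack(track);
   *   }
   * </pre>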
*
* @param {shaka.extern.Track} track
* @export
*/
selectTextTrack(track) {
if (this.manifest_ && this.streamingEngine_) {
const stream = this.manifest_.textStreams.find(
(stream) => stream.id == track.id);
if (!stream) {
shaka.log.error('No stream with id', track.id);
return;
}
if (stream == this.streamingEngine_.getCurrentTextStream()) {
shaka.log.debug('Text track already selected.');
return;
}
// Add entries to the history.
this.addTextStreamToSwitchHistory_(stream, /* fromAdaptation= */ false);
this.streamingEngine_.switchTextStream(stream);
this.onTextChanged_();
// Workaround for https://github.com/google/shaka-player/issues/1299
// When track is selected, back-propagate the language to
// currentTextLanguage_.
this.currentTextLanguage_ = stream.language;
} else if (this.video_ && this.video_.src && this.video_.textTracks) {
const textTracks = this.getFilteredTextTracks_();
for (const textTrack of textTracks) {
if (shaka.util.StreamUtils.html5TrackId(textTrack) == track.id) {
// Leave the track in 'hidden' if it's selected but not showing.
textTrack.mode = this.isTextVisible_ ? 'showing' : 'hidden';
} else {
// Safari allows multiple text tracks to have mode == 'showing', so be
// explicit in resetting the others.
textTrack.mode = 'disabled';
}
}
this.onTextChanged_();
}
}
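// A hypothetical usage sketch for selectTextTrack() above: pick the first
// Spanish text track returned by getTextTracks(), if any.
//
//   const esTrack = player.getTextTracks().find((t) => t.language == 'es');
//   if (esTrack) {
//     player.selectTextTrack(esTrack);
//     player.setTextTrackVisibility(true);
//   }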
/**
* Select a specific variant track to play. <code>track</code> should come
* from a call to <code>getVariantTracks</code>. If <code>track</code> cannot
* be found, this will be a no-op. If the player has not loaded content, this
* will be a no-op.
*
* <p>
* Changing variants will take effect once the currently buffered content has
* been played. To force the change to happen sooner, use
* <code>clearBuffer</code> with <code>safeMargin</code>. Setting
* <code>clearBuffer</code> to <code>true</code> will clear all buffered
* content after <code>safeMargin</code>, allowing the new variant to start
* playing sooner.
*
* <p>
* Note that <code>AdaptationEvents</code> are not fired for manual track
* selections.
*
* @param {shaka.extern.Track} track
* @param {boolean=} clearBuffer
* @param {number=} safeMargin Optional amount of buffer (in seconds) to
* retain when clearing the buffer. Useful for switching variants quickly
* without causing a buffering event. Defaults to 0 if not provided. Ignored
* if clearBuffer is false. Can cause hiccups on some browsers if chosen too
* small, e.g. the duration of two segments is a fair minimum value for
* safeMargin.
* @export
*/
selectVariantTrack(track, clearBuffer = false, safeMargin = 0) {
if (this.manifest_ && this.streamingEngine_) {
if (this.config_.abr.enabled) {
shaka.log.alwaysWarn('Changing tracks while abr manager is enabled ' +
'will likely result in the selected track ' +
'being overridden. Consider disabling abr before '
'calling selectVariantTrack().');
}
const variant = this.manifest_.variants.find(
(variant) => variant.id == track.id);
if (!variant) {
shaka.log.error('No variant with id', track.id);
return;
}
// Double check that the track is allowed to be played. The track list
// should only contain playable variants, but if restrictions change and
// |selectVariantTrack| is called before the track list is updated, we
// could get a now-restricted variant.
if (!shaka.util.StreamUtils.isPlayable(variant)) {
shaka.log.error('Unable to switch to restricted track', track.id);
return;
}
this.switchVariant_(variant, /* fromAdaptation= */ false, clearBuffer,
safeMargin);
// Workaround for https://github.com/google/shaka-player/issues/1299
// When track is selected, back-propagate the language to
// currentAudioLanguage_.
this.currentAdaptationSetCriteria_ = new shaka.media.ExampleBasedCriteria(
variant);
// Update AbrManager variants to match these new settings.
this.updateAbrManagerVariants_();
} else if (this.video_ && this.video_.audioTracks) {
// Safari's native HLS won't let you choose an explicit variant, though
// you can choose audio languages this way.
const audioTracks = Array.from(this.video_.audioTracks);
for (const audioTrack of audioTracks) {
if (shaka.util.StreamUtils.html5TrackId(audioTrack) == track.id) {
// This will reset the "enabled" of other tracks to false.
this.switchHtml5Track_(audioTrack);
return;
}
}
}
}
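// A hypothetical usage sketch for selectVariantTrack() above: pin playback
// to a 720p variant, clearing all but 5 seconds of buffered content.
//
//   player.configure({abr: {enabled: false}});  // keep ABR from overriding
//   const track = player.getVariantTracks().find((t) => t.height == 720);
//   if (track) {
//     player.selectVariantTrack(
//         track, /* clearBuffer= */ true, /* safeMargin= */ 5);
//   }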
/**
* Return a list of audio language-role combinations available. If the
* player has not loaded any content, this will return an empty list.
*
* @return {!Array.<shaka.extern.LanguageRole>}
* @export
*/
getAudioLanguagesAndRoles() {
return shaka.Player.getLanguageAndRolesFrom_(this.getVariantTracks());
}
/**
* Return a list of text language-role combinations available. If the player
* has not loaded any content, this will return an empty list.
*
* @return {!Array.<shaka.extern.LanguageRole>}
* @export
*/
getTextLanguagesAndRoles() {
return shaka.Player.getLanguageAndRolesFrom_(this.getTextTracks());
}
/**
* Return a list of audio languages available. If the player has not loaded
* any content, this will return an empty list.
*
* @return {!Array.<string>}
* @export
*/
getAudioLanguages() {
return Array.from(shaka.Player.getLanguagesFrom_(this.getVariantTracks()));
}
/**
* Return a list of text languages available. If the player has not loaded
* any content, this will return an empty list.
*
* @return {!Array.<string>}
* @export
*/
getTextLanguages() {
return Array.from(shaka.Player.getLanguagesFrom_(this.getTextTracks()));
}
/**
* Sets the current audio language and current variant role to the selected
* language and role, and chooses a new variant if need be. If the player has
* not loaded any content, this will be a no-op.
*
* @param {string} language
* @param {string=} role
* @export
*/
selectAudioLanguage(language, role) {
const LanguageUtils = shaka.util.LanguageUtils;
if (this.manifest_ && this.playhead_) {
this.currentAdaptationSetCriteria_ =
new shaka.media.PreferenceBasedCriteria(language, role || '',
/* channelCount= */ 0, /* label= */ '');
const diff = (a, b) => {
if (!a.video && !b.video) {
return 0;
} else if (!a.video || !b.video) {
return Infinity;
} else {
return Math.abs((a.video.height || 0) - (b.video.height || 0)) +
Math.abs((a.video.width || 0) - (b.video.width || 0));
}
};
// Find the variant whose size is closest to the active variant. This
// ensures we stay at about the same resolution when just changing the
// language/role.
const active = this.streamingEngine_.getCurrentVariant();
const set =
this.currentAdaptationSetCriteria_.create(this.manifest_.variants);
let bestVariant = null;
for (const curVariant of set.values()) {
if (!bestVariant ||
diff(bestVariant, active) > diff(curVariant, active)) {
bestVariant = curVariant;
}
}
if (bestVariant) {
const track = shaka.util.StreamUtils.variantToTrack(bestVariant);
this.selectVariantTrack(track, /* clearBuffer= */ true);
return;
}
// If we haven't switched yet, just use ABR to find a new track.
this.chooseVariantAndSwitch_();
} else if (this.video_ && this.video_.audioTracks) {
const audioTracks = Array.from(this.video_.audioTracks);
const selectedLanguage = LanguageUtils.normalize(language);
let languageMatch = null;
let languageAndRoleMatch = null;
for (const audioTrack of audioTracks) {
const track = shaka.util.StreamUtils.html5AudioTrackToTrack(audioTrack);
if (LanguageUtils.normalize(track.language) == selectedLanguage) {
languageMatch = audioTrack;
if (role) {
if (track.roles.includes(role)) {
languageAndRoleMatch = audioTrack;
}
} else { // no role
if (track.roles.length == 0) {
languageAndRoleMatch = audioTrack;
}
}
}
}
if (languageAndRoleMatch) {
this.switchHtml5Track_(languageAndRoleMatch);
} else if (languageMatch) {
this.switchHtml5Track_(languageMatch);
}
}
}
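// A hypothetical usage sketch for selectAudioLanguage() above:
//
//   // Switch to French audio, keeping roughly the current resolution:
//   player.selectAudioLanguage('fr');
//   // Or request a specific role along with the language:
//   player.selectAudioLanguage('fr', 'description');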
/**
* Sets the current text language and current text role to the selected
* language and role, and chooses a new variant if need be. If the player has
* not loaded any content, this will be a no-op.
*
* @param {string} language
* @param {string=} role
* @param {boolean=} forced
* @export
*/
selectTextLanguage(language, role, forced = false) {
const LanguageUtils = shaka.util.LanguageUtils;
if (this.manifest_ && this.playhead_) {
this.currentTextLanguage_ = language;
this.currentTextRole_ = role || '';
this.currentTextForced_ = forced;
const chosenText = this.chooseTextStream_();
if (chosenText) {
if (chosenText == this.streamingEngine_.getCurrentTextStream()) {
shaka.log.debug('Text track already selected.');
return;
}
this.addTextStreamToSwitchHistory_(
chosenText, /* fromAdaptation= */ false);
if (this.shouldStreamText_()) {
this.streamingEngine_.switchTextStream(chosenText);
this.onTextChanged_();
}
}
} else {
const selectedLanguage = LanguageUtils.normalize(language);
const track = this.getTextTracks().find((t) => {
return LanguageUtils.normalize(t.language) == selectedLanguage &&
(!role || t.roles.includes(role)) && t.forced == forced;
});
if (track) {
this.selectTextTrack(track);
}
}
}
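// A hypothetical usage sketch for selectTextLanguage() above: prefer forced
// English subtitles (e.g. translations of foreign dialogue) over regular
// ones.
//
//   player.selectTextLanguage(
//       'en', /* role= */ undefined, /* forced= */ true);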
/**
* Select variant tracks that have a given label. This assumes the
* label uniquely identifies an audio stream, so all variants with that
* label are expected to share the same variant.audio.
*
* @param {string} label
* @export
*/
selectVariantsByLabel(label) {
if (this.manifest_ && this.playhead_) {
let firstVariantWithLabel = null;
for (const variant of this.manifest_.variants) {
if (variant.audio.label == label) {
firstVariantWithLabel = variant;
break;
}
}
if (firstVariantWithLabel == null) {
shaka.log.warning('No variants were found with label: ' +
label + '. Ignoring the request to switch.');
return;
}
// Label is a unique identifier of a variant's audio stream.
// Because of that we assume that all the variants with the same
// label have the same language.
this.currentAdaptationSetCriteria_ =
new shaka.media.PreferenceBasedCriteria(
firstVariantWithLabel.language, '', 0, label);
this.chooseVariantAndSwitch_();
}
}
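// A hypothetical usage sketch for selectVariantsByLabel() above: switch to
// the variants whose audio stream carries a given label.
//
//   const labeled = player.getVariantTracks().find((t) => t.label);
//   if (labeled) {
//     player.selectVariantsByLabel(labeled.label);
//   }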
/**
* Check if the text displayer is enabled.
*
* @return {boolean}
* @export
*/
isTextTrackVisible() {
const expected = this.isTextVisible_;
if (this.mediaSourceEngine_) {
// Make sure our values are still in-sync.
const actual = this.mediaSourceEngine_.getTextDisplayer().isTextVisible();
goog.asserts.assert(
actual == expected, 'text visibility has fallen out of sync');
// Always return the actual value so that the app has the most accurate
// information (in the case that the values come out of sync in prod).
return actual;
} else if (this.video_ && this.video_.src && this.video_.textTracks) {
const textTracks = this.getFilteredTextTracks_();
return textTracks.some((t) => t.mode == 'showing');
}
return expected;
}
/**
* Return a list of chapters tracks.
*
* @return {!Array.<shaka.extern.Track>}
* @export
*/
getChaptersTracks() {
if (this.video_ && this.video_.src && this.video_.textTracks) {
const textTracks = this.getChaptersTracks_();
const StreamUtils = shaka.util.StreamUtils;
return textTracks.map((text) => StreamUtils.html5TextTrackToTrack(text));
} else {
return [];
}
}
/**
* Return the list of chapters for the given language.
*
* @param {string} language
* @return {!Array.<shaka.extern.Chapter>}
* @export
*/
getChapters(language) {
const LanguageUtils = shaka.util.LanguageUtils;
const inputLanguage = LanguageUtils.normalize(language);
const chaptersTracks = this.getChaptersTracks_();
const chaptersTrack = chaptersTracks
.find((t) => LanguageUtils.normalize(t.language) == inputLanguage);
if (!chaptersTrack || !chaptersTrack.cues) {
return [];
}
const chapters = [];
for (const cue of chaptersTrack.cues) {
/** @type {shaka.extern.Chapter} */
const chapter = {
title: cue.text,
startTime: cue.startTime,
endTime: cue.endTime,
};
chapters.push(chapter);
}
return chapters;
}
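// A hypothetical usage sketch for getChapters() above, assuming a chapters
// track was previously side-loaded (e.g. via addChaptersTrack()):
//
//   for (const chapter of player.getChapters('en')) {
//     console.log(chapter.title, chapter.startTime, chapter.endTime);
//   }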
/**
* Get the TextTracks, ignoring those with the 'metadata' or 'chapters' kind
* and the one generated by the SimpleTextDisplayer.
*
* @return {!Array.<TextTrack>}
* @private
*/
getFilteredTextTracks_() {
goog.asserts.assert(this.video_.textTracks,
'TextTracks should be valid.');
return Array.from(this.video_.textTracks)
.filter((t) => t.kind != 'metadata' && t.kind != 'chapters' &&
t.label != shaka.Player.TextTrackLabel);
}
/**
* Get the TextTracks with the 'metadata' kind.
*
* @return {!Array.<TextTrack>}
* @private
*/
getMetadataTracks_() {
goog.asserts.assert(this.video_.textTracks,
'TextTracks should be valid.');
return Array.from(this.video_.textTracks)
.filter((t) => t.kind == 'metadata');
}
/**
* Get the TextTracks with the 'chapters' kind.
*
* @return {!Array.<TextTrack>}
* @private
*/
getChaptersTracks_() {
goog.asserts.assert(this.video_.textTracks,
'TextTracks should be valid.');
return Array.from(this.video_.textTracks)
.filter((t) => t.kind == 'chapters');
}
/**
* Enable or disable the text displayer. If the player is in an unloaded
* state, the request will be applied next time content is loaded.
*
* @param {boolean} isVisible
* @export
*/
setTextTrackVisibility(isVisible) {
const oldVisibility = this.isTextVisible_;
// Convert to boolean in case apps pass 0/1 instead of false/true.
const newVisibility = !!isVisible;
if (oldVisibility == newVisibility) {
return;
}
this.isTextVisible_ = newVisibility;
// Hold off on setting the text visibility until we have all the components
// we need. This ensures that they stay in-sync.
if (this.loadMode_ == shaka.Player.LoadMode.MEDIA_SOURCE) {
this.mediaSourceEngine_.getTextDisplayer()
.setTextVisibility(newVisibility);
// When the user wants to see captions, we stream captions. When the user
// doesn't want to see captions, we don't stream captions. This is to
// avoid bandwidth consumption by an unused resource. The app developer
// can override this and configure us to always stream captions.
if (!this.config_.streaming.alwaysStreamText) {
if (newVisibility) {
if (this.streamingEngine_.getCurrentTextStream()) {
// We already have a selected text stream.
} else {
// Find the text stream that best matches the user's preferences.
const streams =
shaka.util.StreamUtils.filterStreamsByLanguageAndRole(
this.manifest_.textStreams,
this.currentTextLanguage_,
this.currentTextRole_,
this.currentTextForced_);
// It is possible that there are no streams to play.
if (streams.length > 0) {
this.streamingEngine_.switchTextStream(streams[0]);
this.onTextChanged_();
}
}
} else {
this.streamingEngine_.unloadTextStream();
}
}
} else if (this.video_ && this.video_.src && this.video_.textTracks) {
const textTracks = this.getFilteredTextTracks_();
// Find the active track by looking for one which is not disabled. This
// is the only way to identify the track which is currently displayed.
// Set it to 'showing' or 'hidden' based on newVisibility.
for (const textTrack of textTracks) {
if (textTrack.mode != 'disabled') {
textTrack.mode = newVisibility ? 'showing' : 'hidden';
}
}
}
// We need to fire the event after we have updated everything so that
// everything will be in a stable state when the app responds to the
// event.
this.onTextTrackVisibility_();
}
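// A hypothetical usage sketch for setTextTrackVisibility() above, toggling
// captions from an assumed app-defined button element:
//
//   captionButton.addEventListener('click', () => {
//     player.setTextTrackVisibility(!player.isTextTrackVisible());
//   });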
/**
* Get the current playhead position as a date. This should only be called
* when the player has loaded a live stream. If the player has not loaded a
* live stream, this will return <code>null</code>.
*
* @return {Date}
* @export
*/
getPlayheadTimeAsDate() {
if (!this.isLive()) {
shaka.log.warning('getPlayheadTimeAsDate is for live streams!');
return null;
}
const walkerPayload = this.walker_.getCurrentPayload();
let presentationTime = 0;
if (this.playhead_) {
presentationTime = this.playhead_.getTime();
} else if (walkerPayload) {
if (walkerPayload.startTime == null) {
// A live stream with no requested start time and no playhead yet. We
// would start at the live edge, but we don't have that yet, so return
// the current date & time.
return new Date();
} else {
// A specific start time has been requested. This is what Playhead will
// use once it is created.
presentationTime = walkerPayload.startTime;
}
}
if (this.manifest_) {
const timeline = this.manifest_.presentationTimeline;
const startTime = timeline.getPresentationStartTime();
return new Date(/* ms= */ (startTime + presentationTime) * 1000);
} else if (this.video_ && this.video_.getStartDate) {
// Apple's native HLS gives us getStartDate(), which is only available if
// EXT-X-PROGRAM-DATETIME is in the playlist.
const startDate = this.video_.getStartDate();
if (isNaN(startDate.getTime())) {
shaka.log.warning(
'EXT-X-PROGRAM-DATETIME required to get playhead time as Date!');
return null;
}
return new Date(startDate.getTime() + (presentationTime * 1000));
} else {
shaka.log.warning('No way to get playhead time as Date!');
return null;
}
}
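// A hypothetical usage sketch for getPlayheadTimeAsDate() above;
// clockElement is an assumed app-defined DOM node.
//
//   if (player.isLive()) {
//     const date = player.getPlayheadTimeAsDate();
//     if (date) {
//       clockElement.textContent = date.toLocaleTimeString();
//     }
//   }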
/**
* Get the presentation start time as a date. This should only be called when
* the player has loaded a live stream. If the player has not loaded a live
* stream, this will return <code>null</code>.
*
* @return {Date}
* @export
*/
getPresentationStartTimeAsDate() {
if (!this.isLive()) {
shaka.log.warning('getPresentationStartTimeAsDate is for live streams!');
return null;
}
if (this.manifest_) {
const timeline = this.manifest_.presentationTimeline;
const startTime = timeline.getPresentationStartTime();
goog.asserts.assert(startTime != null,
'Presentation start time should not be null!');
return new Date(/* ms= */ startTime * 1000);
} else if (this.video_ && this.video_.getStartDate) {
// Apple's native HLS gives us getStartDate(), which is only available if
// EXT-X-PROGRAM-DATETIME is in the playlist.
const startDate = this.video_.getStartDate();
if (isNaN(startDate.getTime())) {
shaka.log.warning(
'EXT-X-PROGRAM-DATETIME required to get presentation start time ' +
'as Date!');
return null;
}
return startDate;
} else {
shaka.log.warning('No way to get presentation start time as Date!');
return null;
}
}
/**
* Get information about what the player has buffered. If the player has not
* loaded content or is currently loading content, the buffered content will
* be empty.
*
* @return {shaka.extern.BufferedInfo}
* @export
*/
getBufferedInfo() {
if (this.loadMode_ == shaka.Player.LoadMode.MEDIA_SOURCE) {
return this.mediaSourceEngine_.getBufferedInfo();
}
const info = {
total: [],
audio: [],
video: [],
text: [],
};
if (this.loadMode_ == shaka.Player.LoadMode.SRC_EQUALS) {
const TimeRangesUtils = shaka.media.TimeRangesUtils;
info.total = TimeRangesUtils.getBufferedInfo(this.video_.buffered);
}
return info;
}
/**
* Get statistics for the current playback session. If the player is not
* playing content, this will return an empty stats object.
*
* @return {shaka.extern.Stats}
* @export
*/
getStats() {
// If the Player is not in a fully-loaded state, then return an empty stats
// blob so that this call will never fail.
const loaded = this.loadMode_ == shaka.Player.LoadMode.MEDIA_SOURCE ||
this.loadMode_ == shaka.Player.LoadMode.SRC_EQUALS;
if (!loaded) {
return shaka.util.Stats.getEmptyBlob();
}
this.updateStateHistory_();
goog.asserts.assert(this.video_, 'If we have stats, we should have video_');
const element = /** @type {!HTMLVideoElement} */ (this.video_);
const completionRatio = element.currentTime / element.duration;
if (!isNaN(completionRatio)) {
this.stats_.setCompletionPercent(Math.round(100 * completionRatio));
}
if (element.getVideoPlaybackQuality) {
const info = element.getVideoPlaybackQuality();
this.stats_.setDroppedFrames(
Number(info.droppedVideoFrames),
Number(info.totalVideoFrames));
this.stats_.setCorruptedFrames(Number(info.corruptedVideoFrames));
}
const licenseSeconds =
this.drmEngine_ ? this.drmEngine_.getLicenseTime() : NaN;
this.stats_.setLicenseTime(licenseSeconds);
if (this.loadMode_ == shaka.Player.LoadMode.MEDIA_SOURCE) {
// Even though we are loaded, it is still possible that we don't have a
// variant yet because we set the load mode before we select the first
// variant to stream.
const variant = this.streamingEngine_.getCurrentVariant();
if (variant) {
const rate = this.playRateController_ ?
this.playRateController_.getRealRate() : 1;
const variantBandwidth = rate * variant.bandwidth;
// TODO: Should include text bandwidth if it is enabled.
const currentStreamBandwidth = variantBandwidth;
this.stats_.setCurrentStreamBandwidth(currentStreamBandwidth);
}
if (variant && variant.video) {
this.stats_.setResolution(
/* width= */ variant.video.width || NaN,
/* height= */ variant.video.height || NaN);
}
if (this.isLive()) {
const now = this.getPresentationStartTimeAsDate().valueOf() +
this.seekRange().end * 1000;
const latency = (Date.now() - now) / 1000;
this.stats_.setLiveLatency(latency);
}
if (this.manifest_ && this.manifest_.presentationTimeline) {
const maxSegmentDuration =
this.manifest_.presentationTimeline.getMaxSegmentDuration();
this.stats_.setMaxSegmentDuration(maxSegmentDuration);
}
const estimate = this.abrManager_.getBandwidthEstimate();
this.stats_.setBandwidthEstimate(estimate);
}
return this.stats_.getBlob();
}
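// A hypothetical usage sketch for getStats() above: sample playback stats
// periodically, e.g. to feed a QoE dashboard.
//
//   setInterval(() => {
//     const stats = player.getStats();
//     console.log('estimated bandwidth (bits/s):', stats.estimatedBandwidth,
//         'dropped frames:', stats.droppedFrames);
//   }, 5000);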
/**
* Adds the given text track to the loaded manifest. <code>load()</code> must
* resolve before calling. The presentation must have a duration.
*
* This returns the created track, which can immediately be selected by the
* application. The track will not be automatically selected.
*
* @param {string} uri
* @param {string} language
* @param {string} kind
* @param {string=} mimeType
* @param {string=} codec
* @param {string=} label
* @param {boolean=} forced
* @return {shaka.extern.Track}
* @export
*/
addTextTrack(uri, language, kind, mimeType, codec, label, forced = false) {
shaka.Deprecate.deprecateFeature(4,
'addTextTrack',
'Please use addTextTrackAsync.');
if (this.loadMode_ != shaka.Player.LoadMode.MEDIA_SOURCE &&
this.loadMode_ != shaka.Player.LoadMode.SRC_EQUALS) {
shaka.log.error(
'Must call load() and wait for it to resolve before adding text ' +
'tracks.');
throw new shaka.util.Error(
shaka.util.Error.Severity.RECOVERABLE,
shaka.util.Error.Category.PLAYER,
shaka.util.Error.Code.CONTENT_NOT_LOADED);
}
if (!mimeType) {
// Try using the uri extension.
const extension = shaka.media.ManifestParser.getExtension(uri);
mimeType = shaka.Player.TEXT_EXTENSIONS_TO_MIME_TYPES_[extension];
if (!mimeType) {
shaka.log.error(
'The mimeType has not been provided and it could not be deduced ' +
'from its extension.');
throw new shaka.util.Error(
shaka.util.Error.Severity.RECOVERABLE,
shaka.util.Error.Category.TEXT,
shaka.util.Error.Code.TEXT_COULD_NOT_GUESS_MIME_TYPE,
extension);
}
}
if (this.loadMode_ == shaka.Player.LoadMode.SRC_EQUALS) {
if (mimeType != 'text/vtt') {
shaka.log.error('Only WebVTT is supported when using src=');
throw new shaka.util.Error(
shaka.util.Error.Severity.RECOVERABLE,
shaka.util.Error.Category.TEXT,
shaka.util.Error.Code.TEXT_ONLY_WEBVTT_SRC_EQUALS,
mimeType);
}
if (forced) {
// See: https://github.com/whatwg/html/issues/4472
kind = 'forced';
}
const trackElement =
/** @type {!HTMLTrackElement} */(document.createElement('track'));
trackElement.src = uri;
trackElement.label = label || '';
trackElement.kind = kind;
trackElement.srclang = language;
// Because we're pulling in the text track file via JavaScript, the
// same-origin policy applies. If you'd like to have a player served
// from one domain, but the text track served from another, you'll
// need to enable CORS in order to do so. In addition to enabling CORS
// on the server serving the text tracks, you will need to add the
// crossorigin attribute to the video element itself.
if (!this.video_.getAttribute('crossorigin')) {
this.video_.setAttribute('crossorigin', 'anonymous');
}
this.video_.appendChild(trackElement);
const textTracks = this.getTextTracks();
const srcTrack = textTracks.find((t) => {
return t.language == language &&
t.label == (label || '') &&
t.kind == kind;
});
if (srcTrack) {
this.onTracksChanged_();
return srcTrack;
}
// This should not happen, but there are browser implementations that may
// not support the Track element.
shaka.log.error('Cannot add this text when loaded with src=');
throw new shaka.util.Error(
shaka.util.Error.Severity.RECOVERABLE,
shaka.util.Error.Category.TEXT,
shaka.util.Error.Code.CANNOT_ADD_EXTERNAL_TEXT_TO_SRC_EQUALS);
}
const ContentType = shaka.util.ManifestParserUtils.ContentType;
const duration = this.manifest_.presentationTimeline.getDuration();
if (duration == Infinity) {
throw new shaka.util.Error(
shaka.util.Error.Severity.RECOVERABLE,
shaka.util.Error.Category.MANIFEST,
shaka.util.Error.Code.CANNOT_ADD_EXTERNAL_TEXT_TO_LIVE_STREAM);
}
/** @type {shaka.extern.Stream} */
const stream = {
id: this.nextExternalStreamId_++,
originalId: null,
createSegmentIndex: () => Promise.resolve(),
segmentIndex: shaka.media.SegmentIndex.forSingleSegment(
/* startTime= */ 0,
/* duration= */ duration,
/* uris= */ [uri]),
mimeType: mimeType || '',
codecs: codec || '',
kind: kind,
encrypted: false,
drmInfos: [],
keyIds: new Set(),
language: language,
label: label || null,
type: ContentType.TEXT,
primary: false,
trickModeVideo: null,
emsgSchemeIdUris: null,
roles: [],
forced: !!forced,
channelsCount: null,
audioSamplingRate: null,
spatialAudio: false,
closedCaptions: null,
};
const fullMimeType = shaka.util.MimeUtils.getFullType(
stream.mimeType, stream.codecs);
const supported = shaka.text.TextEngine.isTypeSupported(fullMimeType);
if (!supported) {
throw new shaka.util.Error(
shaka.util.Error.Severity.CRITICAL,
shaka.util.Error.Category.TEXT,
shaka.util.Error.Code.MISSING_TEXT_PLUGIN,
mimeType);
}
this.manifest_.textStreams.push(stream);
this.onTracksChanged_();
return shaka.util.StreamUtils.textStreamToTrack(stream);
}
/**
* Adds the given text track to the loaded manifest. <code>load()</code> must
* resolve before calling. The presentation must have a duration.
*
* This returns the created track, which can immediately be selected by the
* application. The track will not be automatically selected.
*
* @param {string} uri
* @param {string} language
* @param {string} kind
* @param {string=} mimeType
* @param {string=} codec
* @param {string=} label
* @param {boolean=} forced
* @return {!Promise.<shaka.extern.Track>}
* @export
*/
async addTextTrackAsync(uri, language, kind, mimeType, codec, label,
forced = false) {
if (this.loadMode_ != shaka.Player.LoadMode.MEDIA_SOURCE &&
this.loadMode_ != shaka.Player.LoadMode.SRC_EQUALS) {
shaka.log.error(
'Must call load() and wait for it to resolve before adding text ' +
'tracks.');
throw new shaka.util.Error(
shaka.util.Error.Severity.RECOVERABLE,
shaka.util.Error.Category.PLAYER,
shaka.util.Error.Code.CONTENT_NOT_LOADED);
}
if (!mimeType) {
mimeType = await this.getTextMimetype_(uri);
}
if (this.loadMode_ == shaka.Player.LoadMode.SRC_EQUALS) {
if (forced) {
// See: https://github.com/whatwg/html/issues/4472
kind = 'forced';
}
await this.addSrcTrackElement_(uri, language, kind, mimeType, label);
const textTracks = this.getTextTracks();
const srcTrack = textTracks.find((t) => {
return t.language == language &&
t.label == (label || '') &&
t.kind == kind;
});
if (srcTrack) {
this.onTracksChanged_();
return srcTrack;
}
// This should not happen, but there are browser implementations that may
// not support the Track element.
shaka.log.error('Cannot add this text when loaded with src=');
throw new shaka.util.Error(
shaka.util.Error.Severity.RECOVERABLE,
shaka.util.Error.Category.TEXT,
shaka.util.Error.Code.CANNOT_ADD_EXTERNAL_TEXT_TO_SRC_EQUALS);
}
const ContentType = shaka.util.ManifestParserUtils.ContentType;
const duration = this.manifest_.presentationTimeline.getDuration();
if (duration == Infinity) {
throw new shaka.util.Error(
shaka.util.Error.Severity.RECOVERABLE,
shaka.util.Error.Category.MANIFEST,
shaka.util.Error.Code.CANNOT_ADD_EXTERNAL_TEXT_TO_LIVE_STREAM);
}
/** @type {shaka.extern.Stream} */
const stream = {
id: this.nextExternalStreamId_++,
originalId: null,
createSegmentIndex: () => Promise.resolve(),
segmentIndex: shaka.media.SegmentIndex.forSingleSegment(
/* startTime= */ 0,
/* duration= */ duration,
/* uris= */ [uri]),
mimeType: mimeType || '',
codecs: codec || '',
kind: kind,
encrypted: false,
drmInfos: [],
keyIds: new Set(),
language: language,
label: label || null,
type: ContentType.TEXT,
primary: false,
trickModeVideo: null,
emsgSchemeIdUris: null,
roles: [],
forced: !!forced,
channelsCount: null,
audioSamplingRate: null,
spatialAudio: false,
closedCaptions: null,
};
const fullMimeType = shaka.util.MimeUtils.getFullType(
stream.mimeType, stream.codecs);
const supported = shaka.text.TextEngine.isTypeSupported(fullMimeType);
if (!supported) {
throw new shaka.util.Error(
shaka.util.Error.Severity.CRITICAL,
shaka.util.Error.Category.TEXT,
shaka.util.Error.Code.MISSING_TEXT_PLUGIN,
mimeType);
}
this.manifest_.textStreams.push(stream);
this.onTracksChanged_();
return shaka.util.StreamUtils.textStreamToTrack(stream);
}
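// A hypothetical usage sketch for addTextTrackAsync() above: side-load an
// external WebVTT file after load() resolves, then select and show it. The
// URL is a placeholder.
//
//   const track = await player.addTextTrackAsync(
//       'https://example.com/subs.vtt', 'en', 'subtitles', 'text/vtt');
//   player.selectTextTrack(track);
//   player.setTextTrackVisibility(true);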
/**
* Adds the given chapters track to the loaded manifest. <code>load()</code>
* must resolve before calling. The presentation must have a duration.
*
* This returns the created track.
*
* @param {string} uri
* @param {string} language
* @param {string=} mimeType
* @return {!Promise.<shaka.extern.Track>}
* @export
*/
async addChaptersTrack(uri, language, mimeType) {
if (this.loadMode_ != shaka.Player.LoadMode.MEDIA_SOURCE &&
this.loadMode_ != shaka.Player.LoadMode.SRC_EQUALS) {
shaka.log.error(
'Must call load() and wait for it to resolve before adding ' +
'chapters tracks.');
throw new shaka.util.Error(
shaka.util.Error.Severity.RECOVERABLE,
shaka.util.Error.Category.PLAYER,
shaka.util.Error.Code.CONTENT_NOT_LOADED);
}
if (!mimeType) {
mimeType = await this.getTextMimetype_(uri);
}
await this.addSrcTrackElement_(uri, language, /* kind= */ 'chapters',
mimeType);
const chaptersTracks = this.getChaptersTracks();
const chaptersTrack = chaptersTracks.find((t) => {
return t.language == language;
});
if (chaptersTrack) {
const html5ChaptersTracks = this.getChaptersTracks_();
for (const html5ChaptersTrack of html5ChaptersTracks) {
this.processChaptersTrack_(html5ChaptersTrack);
}
return chaptersTrack;
}
// This should not happen, but there are browser implementations that may
// not support the Track element.
shaka.log.error('Cannot add this text when loaded with src=');
throw new shaka.util.Error(
shaka.util.Error.Severity.RECOVERABLE,
shaka.util.Error.Category.TEXT,
shaka.util.Error.Code.CANNOT_ADD_EXTERNAL_TEXT_TO_SRC_EQUALS);
}
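// A hypothetical usage sketch for addChaptersTrack() above: side-load a
// WebVTT chapters file, then read the resulting chapter list. The URL is a
// placeholder.
//
//   await player.addChaptersTrack(
//       'https://example.com/chapters.vtt', 'en', 'text/vtt');
//   const chapters = player.getChapters('en');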
/**
* @param {string} uri
* @return {!Promise.<string>}
* @private
*/
async getTextMimetype_(uri) {
// Try using the uri extension.
const extension = shaka.media.ManifestParser.getExtension(uri);
let mimeType = shaka.Player.TEXT_EXTENSIONS_TO_MIME_TYPES_[extension];
if (mimeType) {
return mimeType;
}
try {
goog.asserts.assert(
this.networkingEngine_, 'Need networking engine.');
// eslint-disable-next-line require-atomic-updates
mimeType = await shaka.media.ManifestParser.getMimeType(uri,
this.networkingEngine_,
this.config_.streaming.retryParameters);
} catch (error) {}
if (mimeType) {
return mimeType;
}
shaka.log.error(
'The mimeType has not been provided and it could not be deduced ' +
'from its extension.');
throw new shaka.util.Error(
shaka.util.Error.Severity.RECOVERABLE,
shaka.util.Error.Category.TEXT,
shaka.util.Error.Code.TEXT_COULD_NOT_GUESS_MIME_TYPE,
extension);
}
/**
* @param {string} uri
* @param {string} language
* @param {string} kind
* @param {string} mimeType
* @param {string=} label
* @private
*/
async addSrcTrackElement_(uri, language, kind, mimeType, label) {
if (mimeType != 'text/vtt') {
goog.asserts.assert(
this.networkingEngine_, 'Need networking engine.');
const data = await this.getTextData_(uri,
this.networkingEngine_,
this.config_.streaming.retryParameters);
const vttText = this.convertToWebVTT_(data, mimeType);
const blob = new Blob([vttText], {type: 'text/vtt'});
uri = shaka.media.MediaSourceEngine.createObjectURL(blob);
mimeType = 'text/vtt';
}
const trackElement =
/** @type {!HTMLTrackElement} */(document.createElement('track'));
trackElement.src = uri;
trackElement.label = label || '';
trackElement.kind = kind;
trackElement.srclang = language;
// Because we're pulling in the text track file via JavaScript, the
// same-origin policy applies. If you'd like to have a player served
// from one domain, but the text track served from another, you'll
// need to enable CORS in order to do so. In addition to enabling CORS
// on the server serving the text tracks, you will need to add the
// crossorigin attribute to the video element itself.
if (!this.video_.getAttribute('crossorigin')) {
this.video_.setAttribute('crossorigin', 'anonymous');
}
this.video_.appendChild(trackElement);
}
/**
* @param {string} uri
* @param {!shaka.net.NetworkingEngine} netEngine
* @param {shaka.extern.RetryParameters} retryParams
* @return {!Promise.<BufferSource>}
* @private
*/
async getTextData_(uri, netEngine, retryParams) {
const type = shaka.net.NetworkingEngine.RequestType.SEGMENT;
const request = shaka.net.NetworkingEngine.makeRequest([uri], retryParams);
request.method = 'GET';
const response = await netEngine.request(type, request).promise;
return response.data;
}
/**
* Converts an input string to a WebVTT format string.
*
* @param {BufferSource} buffer
* @param {string} mimeType
* @return {string}
* @private
*/
convertToWebVTT_(buffer, mimeType) {
const factory = shaka.text.TextEngine.findParser(mimeType);
if (factory) {
const obj = factory();
const time = {
periodStart: 0,
segmentStart: 0,
segmentEnd: this.video_.duration,
};
const data = shaka.util.BufferUtils.toUint8(buffer);
const cues = obj.parseMedia(data, time);
return shaka.text.WebVttGenerator.convert(cues);
}
throw new shaka.util.Error(
shaka.util.Error.Severity.CRITICAL,
shaka.util.Error.Category.TEXT,
shaka.util.Error.Code.MISSING_TEXT_PLUGIN,
mimeType);
}
/**
* Set the maximum resolution that the platform's hardware can handle.
* This will be called automatically by <code>shaka.cast.CastReceiver</code>
* to enforce limitations of the Chromecast hardware.
*
* @param {number} width
* @param {number} height
* @export
*/
setMaxHardwareResolution(width, height) {
this.maxHwRes_.width = width;
this.maxHwRes_.height = height;
}
/**
* Retry streaming after a streaming failure has occurred. When the player has
* not loaded content or is loading content, this will be a no-op and will
* return <code>false</code>.
*
* <p>
* If the player has loaded content, and streaming has not seen an error, this
* will return <code>false</code>.
*
* <p>
* If the player has loaded content, and streaming has seen an error, but the
* player could not resume streaming, this will return <code>false</code>.
*
* @return {boolean}
* @export
*/
retryStreaming() {
return this.loadMode_ == shaka.Player.LoadMode.MEDIA_SOURCE ?
this.streamingEngine_.retry() :
false;
}
/**
* Get the manifest that the player has loaded. If the player has not loaded
* any content, this will return <code>null</code>.
*
* NOTE: This structure is NOT covered by semantic versioning compatibility
* guarantees. It may change at any time!
*
* This is marked as deprecated to warn Closure Compiler users at compile-time
* to avoid using this method.
*
* @return {?shaka.extern.Manifest}
* @export
* @deprecated
*/
getManifest() {
shaka.log.alwaysWarn(
'Shaka Player\'s internal Manifest structure is NOT covered by ' +
'semantic versioning compatibility guarantees. It may change at any ' +
'time! Please consider filing a feature request for whatever you ' +
'use getManifest() for.');
return this.manifest_;
}
/**
* Get the type of manifest parser that the player is using. If the player has
* not loaded any content, this will return <code>null</code>.
*
* @return {?shaka.extern.ManifestParser.Factory}
* @export
*/
getManifestParserFactory() {
return this.parserFactory_;
}
/**
* @param {shaka.extern.Variant} variant
* @param {boolean} fromAdaptation
* @private
*/
addVariantToSwitchHistory_(variant, fromAdaptation) {
const switchHistory = this.stats_.getSwitchHistory();
switchHistory.updateCurrentVariant(variant, fromAdaptation);
}
/**
* @param {shaka.extern.Stream} textStream
* @param {boolean} fromAdaptation
* @private
*/
addTextStreamToSwitchHistory_(textStream, fromAdaptation) {
const switchHistory = this.stats_.getSwitchHistory();
switchHistory.updateCurrentText(textStream, fromAdaptation);
}
/**
* @return {shaka.extern.PlayerConfiguration}
* @private
*/
defaultConfig_() {
const config = shaka.util.PlayerConfiguration.createDefault();
config.streaming.failureCallback = (error) => {
this.defaultStreamingFailureCallback_(error);
};
// Because this.video_ may not be set when the config is built, the default
// TextDisplay factory must capture a reference to "this".
config.textDisplayFactory = () => {
if (this.videoContainer_) {
return new shaka.text.UITextDisplayer(
this.video_, this.videoContainer_);
} else {
return new shaka.text.SimpleTextDisplayer(this.video_);
}
};
return config;
}
/**
* Set the videoContainer to construct UITextDisplayer.
* @param {HTMLElement} videoContainer
* @export
*/
setVideoContainer(videoContainer) {
this.videoContainer_ = videoContainer;
}
/**
* @param {!shaka.util.Error} error
* @private
*/
defaultStreamingFailureCallback_(error) {
const retryErrorCodes = [
shaka.util.Error.Code.BAD_HTTP_STATUS,
shaka.util.Error.Code.HTTP_ERROR,
shaka.util.Error.Code.TIMEOUT,
];
if (this.isLive() && retryErrorCodes.includes(error.code)) {
error.severity = shaka.util.Error.Severity.RECOVERABLE;
shaka.log.warning('Live streaming error. Retrying automatically...');
this.retryStreaming();
}
}
/**
* For CEA closed captions embedded in the video streams, create dummy text
* streams. This can be safely called again on existing manifests, for
* manifest updates.
* @param {!shaka.extern.Manifest} manifest
* @private
*/
makeTextStreamsForClosedCaptions_(manifest) {
const ContentType = shaka.util.ManifestParserUtils.ContentType;
const TextStreamKind = shaka.util.ManifestParserUtils.TextStreamKind;
const CEA608_MIME = shaka.util.MimeUtils.CEA608_CLOSED_CAPTION_MIMETYPE;
const CEA708_MIME = shaka.util.MimeUtils.CEA708_CLOSED_CAPTION_MIMETYPE;
// A set, to make sure we don't create two text streams for the same video.
const closedCaptionsSet = new Set();
for (const textStream of manifest.textStreams) {
if (textStream.mimeType == CEA608_MIME ||
textStream.mimeType == CEA708_MIME) {
// This function might be called on a manifest update, so don't make a
// new text stream for closed caption streams we have seen before.
closedCaptionsSet.add(textStream.originalId);
}
}
for (const variant of manifest.variants) {
const video = variant.video;
if (video && video.closedCaptions) {
for (const id of video.closedCaptions.keys()) {
if (!closedCaptionsSet.has(id)) {
const mimeType = id.startsWith('CC') ? CEA608_MIME : CEA708_MIME;
// Add an empty segmentIndex, for the benefit of the period combiner
// in our builtin DASH parser.
const segmentIndex = new shaka.media.MetaSegmentIndex();
const textStream = {
id: this.nextExternalStreamId_++, // A globally unique ID.
originalId: id, // The CC ID string, like 'CC1', 'CC3', etc.
createSegmentIndex: () => Promise.resolve(),
segmentIndex,
mimeType,
codecs: '',
kind: TextStreamKind.CLOSED_CAPTION,
encrypted: false,
drmInfos: [],
keyIds: new Set(),
language: video.closedCaptions.get(id),
label: null,
type: ContentType.TEXT,
primary: false,
trickModeVideo: null,
emsgSchemeIdUris: null,
roles: video.roles,
forced: false,
channelsCount: null,
audioSamplingRate: null,
spatialAudio: false,
closedCaptions: null,
};
manifest.textStreams.push(textStream);
closedCaptionsSet.add(id);
}
}
}
}
}
/**
* Filters a manifest, removing unplayable streams/variants.
*
* @param {?shaka.extern.Manifest} manifest
* @private
*/
async filterManifest_(manifest) {
await this.filterManifestWithStreamUtils_(manifest);
this.filterManifestWithRestrictions_(manifest);
}
/**
* Filters a manifest, removing unplayable streams/variants.
*
* @param {?shaka.extern.Manifest} manifest
* @private
*/
async filterManifestWithStreamUtils_(manifest) {
goog.asserts.assert(manifest, 'Manifest should exist!');
goog.asserts.assert(this.video_, 'Must not be destroyed');
/** @type {?shaka.extern.Variant} */
const currentVariant = this.streamingEngine_ ?
this.streamingEngine_.getCurrentVariant() : null;
await shaka.util.StreamUtils.filterManifest(
this.drmEngine_, currentVariant, manifest);
this.checkPlayableVariants_(manifest);
}
/**
* Apply the restrictions configuration to the manifest, and check if there's
* a variant that meets the restrictions.
*
* @param {?shaka.extern.Manifest} manifest
* @private
*/
filterManifestWithRestrictions_(manifest) {
// Return if |destroy| is called.
if (this.loadMode_ == shaka.Player.LoadMode.DESTROYED) {
return;
}
const tracksChanged = shaka.util.StreamUtils.applyRestrictions(
manifest.variants, this.config_.restrictions, this.maxHwRes_);
if (tracksChanged && this.streamingEngine_) {
this.onTracksChanged_();
}
// We may need to create new sessions for any new init data.
const curDrmInfo = this.drmEngine_ ? this.drmEngine_.getDrmInfo() : null;
// DrmEngine.newInitData() requires mediaKeys to be available.
if (curDrmInfo && this.drmEngine_.getMediaKeys()) {
for (const variant of manifest.variants) {
const videoDrmInfos = variant.video ? variant.video.drmInfos : [];
const audioDrmInfos = variant.audio ? variant.audio.drmInfos : [];
const drmInfos = videoDrmInfos.concat(audioDrmInfos);
for (const drmInfo of drmInfos) {
// Ignore any data for different key systems.
if (drmInfo.keySystem == curDrmInfo.keySystem) {
for (const initData of (drmInfo.initData || [])) {
this.drmEngine_.newInitData(
initData.initDataType, initData.initData);
}
}
}
}
}
this.checkRestrictedVariants_(manifest);
}
/**
* @private
*/
filterManifestByCurrentVariant_() {
goog.asserts.assert(this.manifest_, 'Manifest should be valid');
goog.asserts.assert(this.streamingEngine_,
'StreamingEngine should be valid');
const currentVariant = this.streamingEngine_ ?
this.streamingEngine_.getCurrentVariant() : null;
shaka.util.StreamUtils.filterManifestByCurrentVariant(currentVariant,
this.manifest_);
this.checkPlayableVariants_(this.manifest_);
}
/**
* @param {shaka.extern.Variant} initialVariant
* @param {number} time
* @return {!Promise.<number>}
* @private
*/
async adjustStartTime_(initialVariant, time) {
/** @type {?shaka.extern.Stream} */
const activeAudio = initialVariant.audio;
/** @type {?shaka.extern.Stream} */
const activeVideo = initialVariant.video;
/**
* @param {?shaka.extern.Stream} stream
* @param {number} time
* @return {!Promise.<?number>}
*/
const getAdjustedTime = async (stream, time) => {
if (!stream) {
return null;
}
await stream.createSegmentIndex();
const ref = stream.segmentIndex[Symbol.iterator]().seek(time);
if (!ref) {
return null;
}
const refTime = ref.startTime;
goog.asserts.assert(refTime <= time,
'Segment should start before target time!');
return refTime;
};
const audioStartTime = await getAdjustedTime(activeAudio, time);
const videoStartTime = await getAdjustedTime(activeVideo, time);
// If we have both video and audio times, pick the larger one. If we picked
// the smaller one, we would download an entire extra segment just to
// buffer the difference.
if (videoStartTime != null && audioStartTime != null) {
return Math.max(videoStartTime, audioStartTime);
} else if (videoStartTime != null) {
return videoStartTime;
} else if (audioStartTime != null) {
return audioStartTime;
} else {
return time;
}
}
/**
* Update the buffering state to be either "we are buffering" or "we are not
* buffering", firing events to the app as needed.
*
* @private
*/
updateBufferState_() {
const isBuffering = this.isBuffering();
shaka.log.v2('Player changing buffering state to', isBuffering);
// Make sure we have all the components we need before we consider
// ourselves loaded.
// TODO: Make the check for "loaded" simpler.
const loaded = this.stats_ && this.bufferObserver_ && this.playhead_;
if (loaded) {
this.playRateController_.setBuffering(isBuffering);
this.updateStateHistory_();
}
// Surface the buffering event so that the app knows if/when we are
// buffering.
const eventName = shaka.Player.EventName.Buffering;
this.dispatchEvent(this.makeEvent_(eventName, {'buffering': isBuffering}));
}
/**
* A callback for when the playback rate changes. We need to watch the
* playback rate so that if the playback rate on the media element changes
* (that was not caused by our play rate controller) we can notify the
* controller so that it can stay in-sync with the change.
*
* @private
*/
onRateChange_() {
/** @type {number} */
const newRate = this.video_.playbackRate;
// On Edge, when someone seeks using the native controls, it will set the
// playback rate to zero until they finish seeking, after which it will
// restore the playback rate.
//
// If the playback rate changes while seeking, Edge will cache the playback
// rate and use it after seeking.
//
// https://github.com/google/shaka-player/issues/951
if (newRate == 0) {
return;
}
if (this.playRateController_) {
// The playback rate has changed. This could be us or someone else.
// If this was us, setting the rate again will be a no-op.
this.playRateController_.set(newRate);
}
const event = this.makeEvent_(shaka.Player.EventName.RateChange);
this.dispatchEvent(event);
}
/**
* Try updating the state history. If the player has not finished
* initializing, this will be a no-op.
*
* @private
*/
updateStateHistory_() {
// If we have not finished initializing, this will be a no-op.
if (!this.stats_) {
return;
}
if (!this.bufferObserver_) {
return;
}
const State = shaka.media.BufferingObserver.State;
const history = this.stats_.getStateHistory();
if (this.bufferObserver_.getState() == State.STARVING) {
history.update('buffering');
} else if (this.video_.paused) {
history.update('paused');
} else if (this.video_.ended) {
history.update('ended');
} else {
history.update('playing');
}
}
/**
* Callback from Playhead.
*
* @private
*/
onSeek_() {
if (this.playheadObservers_) {
this.playheadObservers_.notifyOfSeek();
}
if (this.streamingEngine_) {
this.streamingEngine_.seeked();
}
if (this.bufferObserver_) {
// If we seek into an unbuffered range, we should fire a 'buffering' event
// immediately. If StreamingEngine can buffer fast enough, we may not
// update our buffering tracking otherwise.
this.pollBufferState_();
}
}
/**
* Update AbrManager with variants while taking into account restrictions,
* preferences, and ABR.
*
* On error, this dispatches an error event and returns false.
*
* @return {boolean} True if successful.
* @private
*/
updateAbrManagerVariants_() {
try {
goog.asserts.assert(this.manifest_, 'Manifest should exist by now!');
this.checkRestrictedVariants_(this.manifest_);
} catch (e) {
this.onError_(e);
return false;
}
const playableVariants = this.manifest_.variants.filter((variant) => {
return shaka.util.StreamUtils.isPlayable(variant);
});
// Update the abr manager with newly filtered variants.
const adaptationSet = this.currentAdaptationSetCriteria_.create(
playableVariants);
this.abrManager_.setVariants(Array.from(adaptationSet.values()));
return true;
}
/**
* Chooses a variant from all possible variants while taking into account
* restrictions, preferences, and ABR.
*
* On error, this dispatches an error event and returns null.
*
* @return {?shaka.extern.Variant}
* @private
*/
chooseVariant_() {
if (this.updateAbrManagerVariants_()) {
return this.abrManager_.chooseVariant();
} else {
return null;
}
}
/**
* Choose a text stream from all possible text streams while taking into
* account user preference.
*
* @return {?shaka.extern.Stream}
* @private
*/
chooseTextStream_() {
const subset = shaka.util.StreamUtils.filterStreamsByLanguageAndRole(
this.manifest_.textStreams,
this.currentTextLanguage_,
this.currentTextRole_,
this.currentTextForced_);
return subset[0] || null;
}
/**
* Chooses a new Variant. If the new variant differs from the old one, it
* adds the new one to the switch history and switches to it.
*
* Called after a config change, a key status event, or an explicit language
* change.
*
* @private
*/
chooseVariantAndSwitch_() {
goog.asserts.assert(this.config_, 'Must not be destroyed');
// Because we're running this after a config change (manual language
// change) or a key status event, it is always okay to clear the buffer
// here.
const chosenVariant = this.chooseVariant_();
if (chosenVariant) {
this.switchVariant_(chosenVariant, /* fromAdaptation= */ true,
/* clearBuffers= */ true, /* safeMargin= */ 0);
}
}
/**
* @param {shaka.extern.Variant} variant
* @param {boolean} fromAdaptation
* @param {boolean} clearBuffer
* @param {number} safeMargin
* @private
*/
switchVariant_(variant, fromAdaptation, clearBuffer, safeMargin) {
const currentVariant = this.streamingEngine_.getCurrentVariant();
if (variant == currentVariant) {
shaka.log.debug('Variant already selected.');
// If clearing the buffer was requested, force a reselection of the same variant.
if (clearBuffer) {
this.streamingEngine_.switchVariant(variant, clearBuffer, safeMargin,
/* force= */ true);
}
return;
}
// Add entries to the history.
this.addVariantToSwitchHistory_(variant, fromAdaptation);
this.streamingEngine_.switchVariant(variant, clearBuffer, safeMargin);
let oldTrack = null;
if (currentVariant) {
oldTrack = shaka.util.StreamUtils.variantToTrack(currentVariant);
}
const newTrack = shaka.util.StreamUtils.variantToTrack(variant);
if (fromAdaptation) {
// Dispatch an 'adaptation' event
this.onAdaptation_(oldTrack, newTrack);
} else {
// Dispatch a 'variantchanged' event
this.onVariantChanged_(oldTrack, newTrack);
}
}
/**
* @param {AudioTrack} track
* @private
*/
switchHtml5Track_(track) {
goog.asserts.assert(this.video_ && this.video_.audioTracks,
'Video and video.audioTracks should not be null!');
const audioTracks = Array.from(this.video_.audioTracks);
const currentTrack = audioTracks.find((t) => t.enabled);
// This will reset the "enabled" of other tracks to false.
track.enabled = true;
// AirPlay does not reset the "enabled" of other tracks to false, so
// it must be changed by hand.
if (track.id !== currentTrack.id) {
currentTrack.enabled = false;
}
const oldTrack =
shaka.util.StreamUtils.html5AudioTrackToTrack(currentTrack);
const newTrack =
shaka.util.StreamUtils.html5AudioTrackToTrack(track);
this.onVariantChanged_(oldTrack, newTrack);
}
/**
* Decide during startup if text should be streamed/shown.
* @private
*/
setInitialTextState_(initialVariant, initialTextStream) {
// Check if we should show text (based on difference between audio and text
// languages).
if (initialTextStream) {
if (initialVariant.audio && this.shouldInitiallyShowText_(
initialVariant.audio, initialTextStream)) {
this.isTextVisible_ = true;
}
if (this.isTextVisible_) {
// If the cached value says to show text, then update the text displayer
// since it defaults to not shown.
this.mediaSourceEngine_.getTextDisplayer().setTextVisibility(true);
goog.asserts.assert(this.shouldStreamText_(),
'Should be streaming text');
}
this.onTextTrackVisibility_();
} else {
this.isTextVisible_ = false;
}
}
/**
* Check if we should show text on screen automatically.
*
* The text should automatically be shown if the text is language-compatible
* with the user's text language preference, but not compatible with the
* audio.
*
* For example:
* preferred | chosen | chosen |
* text | text | audio | show
* -----------------------------------
* en-CA | en | jp | true
* en | en-US | fr | true
* fr-CA | en-US | jp | false
* en-CA | en-US | en-US | false
*
* @param {shaka.extern.Stream} audioStream
* @param {shaka.extern.Stream} textStream
* @return {boolean}
* @private
*/
shouldInitiallyShowText_(audioStream, textStream) {
const LanguageUtils = shaka.util.LanguageUtils;
/** @type {string} */
const preferredTextLocale =
LanguageUtils.normalize(this.config_.preferredTextLanguage);
/** @type {string} */
const audioLocale = LanguageUtils.normalize(audioStream.language);
/** @type {string} */
const textLocale = LanguageUtils.normalize(textStream.language);
return (
LanguageUtils.areLanguageCompatible(textLocale, preferredTextLocale) &&
!LanguageUtils.areLanguageCompatible(audioLocale, textLocale));
}
/**
* Callback from StreamingEngine.
*
* @private
*/
onManifestUpdate_() {
if (this.parser_ && this.parser_.update) {
this.parser_.update();
}
}
/**
* Callback from StreamingEngine.
*
* @private
*/
onSegmentAppended_() {
// When we append a segment to media source (via streaming engine) we are
// changing what data we have buffered, so notify the playhead of the
// change.
if (this.playhead_) {
this.playhead_.notifyOfBufferingChange();
}
this.pollBufferState_();
}
/**
* Callback from AbrManager.
*
* @param {shaka.extern.Variant} variant
* @param {boolean=} clearBuffer
* @param {number=} safeMargin Optional amount of buffer (in seconds) to
* retain when clearing the buffer.
* Defaults to 0 if not provided. Ignored if clearBuffer is false.
* @private
*/
switch_(variant, clearBuffer = false, safeMargin = 0) {
shaka.log.debug('switch_');
goog.asserts.assert(this.config_.abr.enabled,
'AbrManager should not call switch while disabled!');
goog.asserts.assert(this.manifest_, 'We need a manifest to switch ' +
'variants.');
if (!this.streamingEngine_) {
// There's no way to change it.
return;
}
if (variant == this.streamingEngine_.getCurrentVariant()) {
// This isn't a change.
return;
}
this.switchVariant_(variant, /* fromAdaptation= */ true,
clearBuffer, safeMargin);
}
/**
* Dispatches an 'adaptation' event.
* @param {?shaka.extern.Track} from
* @param {shaka.extern.Track} to
* @private
*/
onAdaptation_(from, to) {
// Delay the 'adaptation' event so that StreamingEngine has time to absorb
// the changes before the user tries to query it.
const event = this.makeEvent_(shaka.Player.EventName.Adaptation, {
oldTrack: from,
newTrack: to,
});
this.delayDispatchEvent_(event);
}
/**
* Dispatches a 'trackschanged' event.
* @private
*/
onTracksChanged_() {
// Delay the 'trackschanged' event so StreamingEngine has time to absorb the
// changes before the user tries to query it.
const event = this.makeEvent_(shaka.Player.EventName.TracksChanged);
this.delayDispatchEvent_(event);
}
/**
* Dispatches a 'variantchanged' event.
* @param {?shaka.extern.Track} from
* @param {shaka.extern.Track} to
* @private
*/
onVariantChanged_(from, to) {
// Delay the 'variantchanged' event so StreamingEngine has time to absorb
// the changes before the user tries to query it.
const event = this.makeEvent_(shaka.Player.EventName.VariantChanged, {
oldTrack: from,
newTrack: to,
});
this.delayDispatchEvent_(event);
}
/**
* Dispatches a 'textchanged' event.
* @private
*/
onTextChanged_() {
// Delay the 'textchanged' event so StreamingEngine has time to absorb the
// changes before the user tries to query it.
const event = this.makeEvent_(shaka.Player.EventName.TextChanged);
this.delayDispatchEvent_(event);
}
/** @private */
onTextTrackVisibility_() {
const event = this.makeEvent_(shaka.Player.EventName.TextTrackVisibility);
this.delayDispatchEvent_(event);
}
/** @private */
onAbrStatusChanged_() {
const event = this.makeEvent_(shaka.Player.EventName.AbrStatusChanged, {
newStatus: this.config_.abr.enabled,
});
this.delayDispatchEvent_(event);
}
/**
* @param {!shaka.util.Error} error
* @private
*/
onError_(error) {
goog.asserts.assert(error instanceof shaka.util.Error, 'Wrong error type!');
// Errors dispatched after |destroy| is called are not meaningful and should
// be safe to ignore.
if (this.loadMode_ == shaka.Player.LoadMode.DESTROYED) {
return;
}
const eventName = shaka.Player.EventName.Error;
const event = this.makeEvent_(eventName, {'detail': error});
this.dispatchEvent(event);
if (event.defaultPrevented) {
error.handled = true;
}
}
/**
* When we fire region events, we need to copy the information out of the
* region to break the connection with the player's internal data. We do the
* copy here because this is the transition point between the player and the
* app.
*
* @param {!shaka.Player.EventName} eventName
* @param {shaka.extern.TimelineRegionInfo} region
*
* @private
*/
onRegionEvent_(eventName, region) {
// Always make a copy to avoid exposing our internal data to the app.
const clone = {
schemeIdUri: region.schemeIdUri,
value: region.value,
startTime: region.startTime,
endTime: region.endTime,
id: region.id,
eventElement: region.eventElement,
};
this.dispatchEvent(this.makeEvent_(eventName, {detail: clone}));
}
/**
* Turn the media element's error object into a Shaka Player error object.
*
* @return {shaka.util.Error}
* @private
*/
videoErrorToShakaError_() {
goog.asserts.assert(this.video_.error,
'Video error expected, but missing!');
if (!this.video_.error) {
return null;
}
const code = this.video_.error.code;
if (code == 1 /* MEDIA_ERR_ABORTED */) {
// Ignore this error code, which should only occur when navigating away or
// deliberately stopping playback of HTTP content.
return null;
}
// Extra error information from MS Edge:
let extended = this.video_.error.msExtendedCode;
if (extended) {
// Convert to unsigned:
if (extended < 0) {
extended += Math.pow(2, 32);
}
// Format as hex:
extended = extended.toString(16);
}
// Extra error information from Chrome:
const message = this.video_.error.message;
return new shaka.util.Error(
shaka.util.Error.Severity.CRITICAL,
shaka.util.Error.Category.MEDIA,
shaka.util.Error.Code.VIDEO_ERROR,
code, extended, message);
}
/**
* @param {!Event} event
* @private
*/
onVideoError_(event) {
const error = this.videoErrorToShakaError_();
if (!error) {
return;
}
this.onError_(error);
}
/**
* @param {!Object.<string, string>} keyStatusMap A map of hex key IDs to
* statuses.
* @private
*/
onKeyStatus_(keyStatusMap) {
if (!this.streamingEngine_) {
// We can't use this info to manage restrictions in src= mode, so ignore
// it.
return;
}
const keyIds = Object.keys(keyStatusMap);
if (keyIds.length == 0) {
shaka.log.warning(
'Got a key status event without any key statuses, so we don\'t ' +
'know the real key statuses. If we don\'t have all the keys, ' +
'you\'ll need to set restrictions so we don\'t select those tracks.');
}
// If EME is using a synthetic key ID, the only key ID is '00' (a single 0
// byte). In this case, it is only used to report global success/failure.
// See note about old platforms in: https://bit.ly/2tpez5Z
const isGlobalStatus = keyIds.length == 1 && keyIds[0] == '00';
if (isGlobalStatus) {
shaka.log.warning(
'Got a synthetic key status event, so we don\'t know the real key ' +
'statuses. If we don\'t have all the keys, you\'ll need to set ' +
'restrictions so we don\'t select those tracks.');
}
const restrictedStatuses = shaka.Player.restrictedStatuses_;
let tracksChanged = false;
// Only filter tracks for keys if we have some key statuses to look at.
if (keyIds.length) {
for (const variant of this.manifest_.variants) {
const streams = shaka.util.StreamUtils.getVariantStreams(variant);
for (const stream of streams) {
const originalAllowed = variant.allowedByKeySystem;
// Only update if we have key IDs for the stream. If the keys aren't
// all present, then the track should be restricted.
if (stream.keyIds.size) {
variant.allowedByKeySystem = true;
for (const keyId of stream.keyIds) {
const keyStatus = keyStatusMap[isGlobalStatus ? '00' : keyId];
variant.allowedByKeySystem = variant.allowedByKeySystem &&
!!keyStatus && !restrictedStatuses.includes(keyStatus);
}
}
if (originalAllowed != variant.allowedByKeySystem) {
tracksChanged = true;
}
} // for (const stream of streams)
} // for (const variant of this.manifest_.variants)
} // if (keyIds.length)
if (tracksChanged) {
this.updateAbrManagerVariants_();
}
const currentVariant = this.streamingEngine_.getCurrentVariant();
if (currentVariant && !currentVariant.allowedByKeySystem) {
shaka.log.debug('Choosing new streams after key status changed');
this.chooseVariantAndSwitch_();
}
if (tracksChanged) {
this.onTracksChanged_();
}
}
/**
* Callback from DrmEngine
* @param {string} keyId
* @param {number} expiration
* @private
*/
onExpirationUpdated_(keyId, expiration) {
if (this.parser_ && this.parser_.onExpirationUpdated) {
this.parser_.onExpirationUpdated(keyId, expiration);
}
const event = this.makeEvent_(shaka.Player.EventName.ExpirationUpdated);
this.dispatchEvent(event);
}
/**
* @return {boolean} true if we should stream text right now.
* @private
*/
shouldStreamText_() {
return this.config_.streaming.alwaysStreamText || this.isTextTrackVisible();
}
/**
* Applies playRangeStart and playRangeEnd to the given timeline. This will
* only affect non-live content.
*
* @param {shaka.media.PresentationTimeline} timeline
* @param {number} playRangeStart
* @param {number} playRangeEnd
*
* @private
*/
static applyPlayRange_(timeline, playRangeStart, playRangeEnd) {
if (playRangeStart > 0) {
if (timeline.isLive()) {
shaka.log.warning(
'|playRangeStart| has been configured for live content. ' +
'Ignoring the setting.');
} else {
timeline.setUserSeekStart(playRangeStart);
}
}
// If the playback has been configured to end before the end of the
// presentation, update the duration unless it's live content.
const fullDuration = timeline.getDuration();
if (playRangeEnd < fullDuration) {
if (timeline.isLive()) {
shaka.log.warning(
'|playRangeEnd| has been configured for live content. ' +
'Ignoring the setting.');
} else {
timeline.setDuration(playRangeEnd);
}
}
}
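/*
 * Example (sketch): for a 60-second VOD timeline,
 * applyPlayRange_(timeline, 10, 50) moves the user seek start to 10s and
 * clamps the duration to 50s, restricting playback to the 10s-50s window.
 * For live content, both settings are ignored with a warning.
 */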
/**
* Checks if the variants are all restricted, and throws an appropriate
* exception if so.
*
* @param {shaka.extern.Manifest} manifest
*
* @private
*/
checkRestrictedVariants_(manifest) {
const restrictedStatuses = shaka.Player.restrictedStatuses_;
const keyStatusMap =
this.drmEngine_ ? this.drmEngine_.getKeyStatuses() : {};
const keyIds = Object.keys(keyStatusMap);
const isGlobalStatus = keyIds.length && keyIds[0] == '00';
let hasPlayable = false;
let hasAppRestrictions = false;
/** @type {!Set.<string>} */
const missingKeys = new Set();
/** @type {!Set.<string>} */
const badKeyStatuses = new Set();
for (const variant of manifest.variants) {
// TODO: Combine with onKeyStatus_.
const streams = [];
if (variant.audio) {
streams.push(variant.audio);
}
if (variant.video) {
streams.push(variant.video);
}
for (const stream of streams) {
if (stream.keyIds.size) {
for (const keyId of stream.keyIds) {
const keyStatus = keyStatusMap[isGlobalStatus ? '00' : keyId];
if (!keyStatus) {
missingKeys.add(keyId);
} else if (restrictedStatuses.includes(keyStatus)) {
badKeyStatuses.add(keyStatus);
}
}
} // if (stream.keyIds.size)
}
if (!variant.allowedByApplication) {
hasAppRestrictions = true;
} else if (variant.allowedByKeySystem) {
hasPlayable = true;
}
}
if (!hasPlayable) {
/** @type {shaka.extern.RestrictionInfo} */
const data = {
hasAppRestrictions,
missingKeys: Array.from(missingKeys),
restrictedKeyStatuses: Array.from(badKeyStatuses),
};
throw new shaka.util.Error(
shaka.util.Error.Severity.CRITICAL,
shaka.util.Error.Category.MANIFEST,
shaka.util.Error.Code.RESTRICTIONS_CANNOT_BE_MET,
data);
}
}
/**
* Confirm some variants are playable. Otherwise, throw an exception.
* @param {!shaka.extern.Manifest} manifest
* @private
*/
checkPlayableVariants_(manifest) {
const valid = manifest.variants.some(shaka.util.StreamUtils.isPlayable);
// If none of the variants are playable, throw
// CONTENT_UNSUPPORTED_BY_BROWSER.
if (!valid) {
throw new shaka.util.Error(
shaka.util.Error.Severity.CRITICAL,
shaka.util.Error.Category.MANIFEST,
shaka.util.Error.Code.CONTENT_UNSUPPORTED_BY_BROWSER);
}
}
/**
* Fire an event, but wait a little bit so that the immediate execution can
* complete before the event is handled.
*
* @param {!shaka.util.FakeEvent} event
* @private
*/
async delayDispatchEvent_(event) {
// Wait until the next interpreter cycle.
await Promise.resolve();
// Only dispatch the event if we are still alive.
if (this.loadMode_ != shaka.Player.LoadMode.DESTROYED) {
this.dispatchEvent(event);
}
}
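/*
 * Ordering sketch (illustrative; finishSynchronousWork is a hypothetical
 * caller): awaiting Promise.resolve() defers the dispatch by exactly one
 * microtask, so it runs after the current call stack unwinds but before any
 * timer callbacks:
 *
 *   this.delayDispatchEvent_(event);  // queued as a microtask
 *   finishSynchronousWork();          // runs first
 *   // ...listeners run next, before any setTimeout(fn, 0) fires.
 */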
/**
* Get the normalized languages for a group of tracks.
*
* @param {!Array.<?shaka.extern.Track>} tracks
* @return {!Set.<string>}
* @private
*/
static getLanguagesFrom_(tracks) {
const languages = new Set();
for (const track of tracks) {
if (track.language) {
languages.add(shaka.util.LanguageUtils.normalize(track.language));
} else {
languages.add('und');
}
}
return languages;
}
/**
* Get all permutations of normalized languages and role for a group of
* tracks.
*
* @param {!Array.<?shaka.extern.Track>} tracks
* @return {!Array.<shaka.extern.LanguageRole>}
* @private
*/
static getLanguageAndRolesFrom_(tracks) {
/** @type {!Map.<string, !Set>} */
const languageToRoles = new Map();
/** @type {!Map.<string, !Map.<string, string>>} */
const languageRoleToLabel = new Map();
for (const track of tracks) {
let language = 'und';
let roles = [];
if (track.language) {
language = shaka.util.LanguageUtils.normalize(track.language);
}
if (track.type == 'variant') {
roles = track.audioRoles;
} else {
roles = track.roles;
}
if (!roles || !roles.length) {
// We must have an empty role so that we will still get a language-role
// entry from our Map.
roles = [''];
}
if (!languageToRoles.has(language)) {
languageToRoles.set(language, new Set());
}
for (const role of roles) {
languageToRoles.get(language).add(role);
if (track.label) {
if (!languageRoleToLabel.has(language)) {
languageRoleToLabel.set(language, new Map());
}
languageRoleToLabel.get(language).set(role, track.label);
}
}
}
// Flatten our map to an array of language-role pairs.
const pairings = [];
languageToRoles.forEach((roles, language) => {
for (const role of roles) {
let label = null;
if (languageRoleToLabel.has(language) &&
languageRoleToLabel.get(language).has(role)) {
label = languageRoleToLabel.get(language).get(role);
}
pairings.push({language, role, label});
}
});
return pairings;
}
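/*
 * Example (sketch): two 'en' tracks, one with roles ['caption'] and label
 * 'English CC' and one with no roles or label, flatten to:
 *
 *   [{language: 'en', role: 'caption', label: 'English CC'},
 *    {language: 'en', role: '', label: null}]
 */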
/**
* Assuming the player is playing content with media source, check if the
* player has buffered enough content to make it to the end of the
* presentation.
*
* @return {boolean}
* @private
*/
isBufferedToEndMS_() {
goog.asserts.assert(
this.video_,
'We need a video element to get buffering information');
goog.asserts.assert(
this.mediaSourceEngine_,
'We need a media source engine to get buffering information');
goog.asserts.assert(
this.manifest_,
'We need a manifest to get buffering information');
// This is a strong guarantee that we are buffered to the end, because it
// means the playhead is already at that end.
if (this.video_.ended) {
return true;
}
// This means that MediaSource has buffered the final segment in all
// SourceBuffers and is no longer accepting additional segments.
if (this.mediaSourceEngine_.ended()) {
return true;
}
// Live streams are "buffered to the end" when they have buffered to the
// live edge or beyond (into the region covered by the presentation delay).
if (this.manifest_.presentationTimeline.isLive()) {
const liveEdge =
this.manifest_.presentationTimeline.getSegmentAvailabilityEnd();
const bufferEnd =
shaka.media.TimeRangesUtils.bufferEnd(this.video_.buffered);
if (bufferEnd != null && bufferEnd >= liveEdge) {
return true;
}
}
return false;
}
/**
* Assuming the player is playing content with src=, check if the player has
* buffered enough content to make it to the end of the presentation.
*
* @return {boolean}
* @private
*/
isBufferedToEndSrc_() {
goog.asserts.assert(
this.video_,
'We need a video element to get buffering information');
// This is a strong guarantee that we are buffered to the end, because it
// means the playhead is already at that end.
if (this.video_.ended) {
return true;
}
// If we have buffered up to the duration of the content, then we have
// buffered enough content to reach the end of the presentation.
const bufferEnd =
shaka.media.TimeRangesUtils.bufferEnd(this.video_.buffered);
// Because Safari's native HLS reports slightly inaccurate values for
// bufferEnd here, we use a fudge factor. Without this, we can end up in a
// buffering state at the end of the stream. See issue #2117.
// TODO: Try to remove the fudge here once we no longer manage buffering
// state above the browser with playbackRate=0.
const fudge = 1; // 1000 ms
return bufferEnd != null && bufferEnd >= this.video_.duration - fudge;
}
/**
* Create an error for when we purposely interrupt a load operation.
*
* @return {!shaka.util.Error}
* @private
*/
createAbortLoadError_() {
return new shaka.util.Error(
shaka.util.Error.Severity.CRITICAL,
shaka.util.Error.Category.PLAYER,
shaka.util.Error.Code.LOAD_INTERRUPTED);
}
/**
* Key
* ----------------------
* D : Detach Node
* A : Attach Node
* MS : Media Source Node
* P : Manifest Parser Node
* M : Manifest Node
* DRM : Drm Engine Node
* L : Load Node
* U : Unloading Node
* SRC : Src Equals Node
*
* Graph Topology
* ----------------------
*
* [SRC]-----+
* ^ |
* | v
* [D]<-->[A]<-----[U]
* | ^
* v |
* [MS]------+
* | |
* v |
* [P]-------+
* | |
* v |
* [M]-------+
* | |
* v |
* [DRM]-----+
* | |
* v |
* [L]-------+
*
* @param {!shaka.routing.Node} currentlyAt
* @param {shaka.routing.Payload} currentlyWith
* @param {!shaka.routing.Node} wantsToBeAt
* @param {shaka.routing.Payload} wantsToHave
* @return {?shaka.routing.Node}
* @private
*/
getNextStep_(currentlyAt, currentlyWith, wantsToBeAt, wantsToHave) {
let next = null;
// Detach is very simple, either stay in detach (because |detach| was called
// while in detached) or go somewhere that requires us to attach to an
// element.
if (currentlyAt == this.detachNode_) {
next = wantsToBeAt == this.detachNode_ ?
this.detachNode_ :
this.attachNode_;
}
if (currentlyAt == this.attachNode_) {
next = this.getNextAfterAttach_(wantsToBeAt, currentlyWith, wantsToHave);
}
if (currentlyAt == this.mediaSourceNode_) {
next = this.getNextAfterMediaSource_(
wantsToBeAt, currentlyWith, wantsToHave);
}
if (currentlyAt == this.parserNode_) {
next = this.getNextMatchingAllDependencies_(
/* destination= */ this.loadNode_,
/* next= */ this.manifestNode_,
/* reset= */ this.unloadNode_,
/* goingTo= */ wantsToBeAt,
/* has= */ currentlyWith,
/* wants= */ wantsToHave);
}
if (currentlyAt == this.manifestNode_) {
next = this.getNextMatchingAllDependencies_(
/* destination= */ this.loadNode_,
/* next= */ this.drmNode_,
/* reset= */ this.unloadNode_,
/* goingTo= */ wantsToBeAt,
/* has= */ currentlyWith,
/* wants= */ wantsToHave);
}
// For DRM, we have two options "load" or "unload". If all our constraints
// are met, we can go to "load". If anything is off, we must go back to
// "unload" to reset.
if (currentlyAt == this.drmNode_) {
next = this.getNextMatchingAllDependencies_(
/* destination= */ this.loadNode_,
/* next= */ this.loadNode_,
/* reset= */ this.unloadNode_,
/* goingTo= */ wantsToBeAt,
/* has= */ currentlyWith,
/* wants= */ wantsToHave);
}
// For DRM w/ src= playback, we only care about destination and media
// element.
if (currentlyAt == this.srcEqualsDrmNode_) {
if (wantsToBeAt == this.srcEqualsNode_ &&
currentlyWith.mediaElement == wantsToHave.mediaElement) {
next = this.srcEqualsNode_;
} else {
next = this.unloadNode_;
}
}
// After we load content, always go through unload because we can't safely
// use components after we have started playback.
if (currentlyAt == this.loadNode_ || currentlyAt == this.srcEqualsNode_) {
next = this.unloadNode_;
}
if (currentlyAt == this.unloadNode_) {
next = this.getNextAfterUnload_(wantsToBeAt, currentlyWith, wantsToHave);
}
goog.asserts.assert(next, 'Missing next step!');
return next;
}
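/*
 * Walk-through (illustrative): starting detached with a load request, the
 * walker steps D -> A -> MS -> P -> M -> DRM -> L, with getNextStep_
 * returning one node per call. If any dependency (media element, uri, or
 * mime type) stops matching mid-route, the next step becomes U, which then
 * routes back to A or D as appropriate.
 */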
/**
* @param {!shaka.routing.Node} goingTo
* @param {shaka.routing.Payload} has
* @param {shaka.routing.Payload} wants
* @return {?shaka.routing.Node}
* @private
*/
getNextAfterAttach_(goingTo, has, wants) {
// Attach and detach are the only two nodes that we can directly go
// back-and-forth between.
if (goingTo == this.detachNode_) {
return this.detachNode_;
}
// If we are going anywhere other than detach, then we need the media
// element to match, if they don't match, we need to go through detach
// first.
if (has.mediaElement != wants.mediaElement) {
return this.detachNode_;
}
// If we are already in attached, and someone calls |attach| again (to the
// same video element), we can handle the redundant request by re-entering
// our current state.
if (goingTo == this.attachNode_) {
return this.attachNode_;
}
// The next step from attached to loaded is through media source.
if (goingTo == this.mediaSourceNode_ || goingTo == this.loadNode_) {
return this.mediaSourceNode_;
}
// If we are going to src=, then we should set up DRM first. This will
// support cases like FairPlay HLS on Safari.
if (goingTo == this.srcEqualsNode_) {
return this.srcEqualsDrmNode_;
}
// We are missing a rule; the null will get caught by a common check in
// the routing system.
return null;
}
/**
* @param {!shaka.routing.Node} goingTo
* @param {shaka.routing.Payload} has
* @param {shaka.routing.Payload} wants
* @return {?shaka.routing.Node}
* @private
*/
getNextAfterMediaSource_(goingTo, has, wants) {
// We can only go to parse manifest or unload. If we want to go to load and
// we have the right media element, we can go to parse manifest. If we
// don't, no matter where we want to go, we must go through unload.
if (goingTo == this.loadNode_ && has.mediaElement == wants.mediaElement) {
return this.parserNode_;
}
// Right now the unload node is responsible for tearing down all playback
// components (including media source). So since we have created media
// source, we need to unload since our dependencies are not compatible.
//
// TODO: We are structured this way to maintain a historic structure. Going
// forward, there is no reason to restrict ourselves to this. Going
// forward we should explore breaking apart |onUnload| and develop
// more meaningful terminology around tearing down playback resources.
return this.unloadNode_;
}
/**
* After unload there are only two options, attached or detached. This choice
* is based on whether or not we have a media element. If we have a media
* element, then we go to attach. If we don't have a media element, we go to
* detach.
*
* @param {!shaka.routing.Node} goingTo
* @param {shaka.routing.Payload} has
* @param {shaka.routing.Payload} wants
* @return {?shaka.routing.Node}
* @private
*/
getNextAfterUnload_(goingTo, has, wants) {
// If we don't want a media element, detach.
// If we have the wrong media element, detach.
// Otherwise it means we want to attach to a media element and it is safe to
// do so.
return !wants.mediaElement || has.mediaElement != wants.mediaElement ?
this.detachNode_ :
this.attachNode_;
}
/**
* A general method used to handle routing when we can either take one step
* toward our destination (while all our dependencies match) or go to a node
* that will reset us so we can try again.
*
* @param {!shaka.routing.Node} destinationNode
* What |goingTo| must be for us to step toward |nextNode|. Otherwise we
* will go to |resetNode|.
* @param {!shaka.routing.Node} nextNode
* The node we will go to next if |goingTo == destinationNode| and all
* dependencies match.
* @param {!shaka.routing.Node} resetNode
* The node we will go to next if |goingTo != destinationNode| or any
* dependency does not match.
* @param {!shaka.routing.Node} goingTo
* The node that the walker is trying to go to.
* @param {shaka.routing.Payload} has
* The payload that the walker currently has.
* @param {shaka.routing.Payload} wants
* The payload that the walker wants to have when it gets to |goingTo|.
* @return {shaka.routing.Node}
* @private
*/
getNextMatchingAllDependencies_(destinationNode, nextNode, resetNode, goingTo,
has, wants) {
if (goingTo == destinationNode &&
has.mediaElement == wants.mediaElement &&
has.uri == wants.uri &&
has.mimeType == wants.mimeType) {
return nextNode;
}
return resetNode;
}
/**
* @return {shaka.routing.Payload}
* @private
*/
static createEmptyPayload_() {
return {
mediaElement: null,
mimeType: null,
startTime: null,
startTimeOfLoad: NaN,
uri: null,
};
}
/**
* Using a promise, wrap the listeners returned by |Walker.startNewRoute|.
* This will work for most usages in |Player| but should not be used for
* special cases.
*
* This will connect |onCancel|, |onEnd|, |onError|, and |onSkip| with
* |resolve| and |reject| but will leave |onStart| unset.
*
* @param {shaka.routing.Walker.Listeners} listeners
* @return {!Promise}
* @private
*/
wrapWalkerListenersWithPromise_(listeners) {
return new Promise((resolve, reject) => {
listeners.onCancel = () => reject(this.createAbortLoadError_());
listeners.onEnd = () => resolve();
listeners.onError = (e) => reject(e);
listeners.onSkip = () => reject(this.createAbortLoadError_());
});
}
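/*
 * Usage sketch (hypothetical call site; |walker_| is assumed to be the
 * player's routing walker): the returned promise resolves when the route
 * finishes and rejects if it is cancelled, skipped, or fails.
 *
 *   const listeners = this.walker_.startNewRoute(...);
 *   await this.wrapWalkerListenersWithPromise_(listeners);
 */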
};
/**
* An internal enum that contains the string values of all of the player events.
* This exists primarily to act as an implicit list of events, for tests.
*
* @enum {string}
*/
shaka.Player.EventName = {
AbrStatusChanged: 'abrstatuschanged',
Adaptation: 'adaptation',
Buffering: 'buffering',
DownloadFailed: 'downloadfailed',
DownloadHeadersReceived: 'downloadheadersreceived',
DrmSessionUpdate: 'drmsessionupdate',
Emsg: 'emsg',
Error: 'error',
ExpirationUpdated: 'expirationupdated',
LargeGap: 'largegap',
Loaded: 'loaded',
Loading: 'loading',
ManifestParsed: 'manifestparsed',
Metadata: 'metadata',
OnStateChange: 'onstatechange',
OnStateIdle: 'onstateidle',
RateChange: 'ratechange',
SessionDataEvent: 'sessiondata',
Streaming: 'streaming',
TextChanged: 'textchanged',
TextTrackVisibility: 'texttrackvisibility',
TimelineRegionAdded: 'timelineregionadded',
TimelineRegionEnter: 'timelineregionenter',
TimelineRegionExit: 'timelineregionexit',
TracksChanged: 'trackschanged',
Unloading: 'unloading',
VariantChanged: 'variantchanged',
};
/**
* In order to know what method of loading the player used for some content, we
* have this enum. It lets us know if content has not been loaded, loaded with
* media source, or loaded with src equals.
*
* This enum has a low resolution, because it is only meant to express the
* outer limits of the various states that the player is in. For example, when
* someone calls a public method on player, it should not matter if they have
* initialized drm engine, it should only matter if they finished loading
* content.
*
* @enum {number}
* @export
*/
shaka.Player.LoadMode = {
'DESTROYED': 0,
'NOT_LOADED': 1,
'MEDIA_SOURCE': 2,
'SRC_EQUALS': 3,
};
/**
* The typical buffering threshold. When we have less than this buffered (in
* seconds), we enter a buffering state. This specific value is based on manual
* testing and evaluation across a variety of platforms.
*
* To make the buffering logic work in all cases, this "typical" threshold will
* be overridden if the rebufferingGoal configuration is too low.
*
* @const {number}
* @private
*/
shaka.Player.TYPICAL_BUFFERING_THRESHOLD_ = 0.5;
/**
* @define {string} A version number taken from git at compile time.
* @export
*/
shaka.Player.version = 'v3.3.0-pre-uncompiled';
// Initialize the deprecation system using the version string we just set
// on the player.
shaka.Deprecate.init(shaka.Player.version);
/**
* These are the EME key statuses that represent restricted playback.
 * 'usable', 'released', 'output-downscaled', and 'status-pending' are
 * statuses of usable keys. The 'expired' status is handled separately in
 * DrmEngine.
*
* @const {!Array.<string>}
* @private
*/
shaka.Player.restrictedStatuses_ = ['output-restricted', 'internal-error'];
/** @private {!Object.<string, function():*>} */
shaka.Player.supportPlugins_ = {};
/** @private {?shaka.extern.IAdManager.Factory} */
shaka.Player.adManagerFactory_ = null;
/**
* @const {!Object.<string, string>}
* @private
*/
shaka.Player.SRC_EQUAL_EXTENSIONS_TO_MIME_TYPES_ = {
'mp4': 'video/mp4',
'm4v': 'video/mp4',
'm4a': 'audio/mp4',
'webm': 'video/webm',
'weba': 'audio/webm',
'mkv': 'video/webm', // Chromium browsers support it.
'ts': 'video/mp2t',
'ogv': 'video/ogg',
'ogg': 'audio/ogg',
'mpg': 'video/mpeg',
'mpeg': 'video/mpeg',
'm3u8': 'application/x-mpegurl',
'mp3': 'audio/mpeg',
'aac': 'audio/aac',
'flac': 'audio/flac',
'wav': 'audio/wav',
};
/**
* @const {!Object.<string, string>}
* @private
*/
shaka.Player.TEXT_EXTENSIONS_TO_MIME_TYPES_ = {
'sbv': 'text/x-subviewer',
'srt': 'text/srt',
'vtt': 'text/vtt',
'webvtt': 'text/vtt',
'ttml': 'application/ttml+xml',
'lrc': 'application/x-subtitle-lrc',
'ssa': 'text/x-ssa',
'ass': 'text/x-ssa',
};
/**
* @const {string}
*/
shaka.Player.TextTrackLabel = 'Shaka Player TextTrack'; | this.parserFactory_ = null;
/** @private {?shaka.extern.Manifest} */
this.manifest_ = null; |
ExamCalendar.test.tsx | import * as React from 'react';
import { mount } from 'enzyme';
import _ from 'lodash';
import { Link, MemoryRouter } from 'react-router-dom';
import { ModuleWithColor } from 'types/views';
import mockModules from '__mocks__/modules';
/** @vars {Module} */
import GER1000 from '__mocks__/modules/GER1000.json';
import { Semester } from 'types/modules';
import ExamCalendar, { getTimeSegment } from './ExamCalendar';
import styles from './ExamCalendar.scss';
const TR_PER_WEEK = 4; | ...module,
colorIndex: i,
})) as unknown) as ModuleWithColor[];
function make(modules: ModuleWithColor[] = [], semester: Semester = 1) {
return mount(
<MemoryRouter>
<ExamCalendar semester={semester} modules={modules} />
</MemoryRouter>,
);
}
// AY17/18 semester 1 exams are from Nov 27 to Dec 9
// November 2017 December 2017
// Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa
// 1 2 3 4 1 2
// 5 6 7 8 9 10 11 3 4 5 6 7 8 9
// 12 13 14 15 16 17 18 10 11 12 13 14 15 16
// 19 20 21 22 23 24 25 17 18 19 20 21 22 23
// 26 27 28 29 30 24 25 26 27 28 29 30
// 31
// Mock module exam dates in semester 1 -
// - GER1000: 2017-11-25 (Sat) Afternoon
// - CS1010S: 2017-11-29 (Wed) Evening
// - ACC2002: 2017-12-01 (Fri) Morning
// - CS4243: 2017-11-29 (Wed) Morning
// - GES1021: 2017-11-29 (Wed) Evening
// - PC1222: 2017-12-05 (Tue) Evening
// - CS3216: No exams
describe(ExamCalendar, () => {
test('only show Saturday if there is a Saturday exam', () => {
const withSaturdayExams = make([(GER1000 as unknown) as ModuleWithColor]);
const withoutSaturdayExams = make(modulesWithColor);
expect(withSaturdayExams.find('thead th')).toHaveLength(6);
expect(withoutSaturdayExams.find('thead th')).toHaveLength(5);
});
test('show month names only in the first cell and on first weekday of month', () => {
const wrapper = make(modulesWithColor);
wrapper.find(`.${styles.day} h3`).forEach((element, index) => {
if (index === 0) {
expect(element.text()).toEqual('Nov 27');
} else if (index === 4) {
expect(element.text()).toEqual('Dec 1');
} else {
// Expect it to be a valid numeric string from 1-31
expect(_.range(1, 32)).toContain(Number(element.text()));
}
});
});
test('show modules that have exams', () => {
const wrapper = make(modulesWithColor);
expect(
wrapper
.find(Link)
.map((element) => element.find(`.${styles.moduleCode}`).text())
.sort(),
).toEqual(['ACC2002', 'CS1010S', 'GES1021', 'PC1222']);
});
test('show modules outside the two week exam period', () => {
const wrapper = make([(GER1000 as unknown) as ModuleWithColor]);
expect(wrapper.find(Link)).toHaveLength(1);
expect(wrapper.find('tbody tr')).toHaveLength(TR_PER_WEEK);
});
test('should hide modules which are hidden in timetable', () => {
const modules = _.cloneDeep(modulesWithColor);
modules[0].hiddenInTimetable = true;
const wrapper = make(modules);
expect(wrapper.find(Link)).toHaveLength(3);
});
});
describe(getTimeSegment, () => {
it.each([
['8:30 AM', 'Morning'],
['9:00 AM', 'Morning'],
['10:00 AM', 'Morning'],
['11:00 AM', 'Morning'],
['12:00 PM', 'Afternoon'],
['1:00 PM', 'Afternoon'],
['2:00 PM', 'Afternoon'],
['2:30 PM', 'Afternoon'],
['3:00 PM', 'Afternoon'],
['5:00 PM', 'Evening'],
['6:30 PM', 'Evening'],
])('%s is in the %s', (time, expected) => expect(getTimeSegment(time)).toBe(expected));
}); | const modulesWithColor = (mockModules.map((module, i) => ({ |
test_qubit.py | import random
from sympy import Integer, Matrix, Rational, sqrt, symbols
from sympy.core.compatibility import range, long
from sympy.physics.quantum.qubit import (measure_all, measure_partial,
matrix_to_qubit, matrix_to_density,
qubit_to_matrix, IntQubit,
IntQubitBra, QubitBra)
from sympy.physics.quantum.gate import (HadamardGate, CNOT, XGate, YGate,
ZGate, PhaseGate)
from sympy.physics.quantum.qapply import qapply
from sympy.physics.quantum.represent import represent
from sympy.physics.quantum.shor import Qubit
from sympy.utilities.pytest import raises
from sympy.physics.quantum.density import Density
from sympy.core.trace import Tr
x, y = symbols('x,y')
epsilon = .000001
def test_Qubit():
array = [0, 0, 1, 1, 0]
qb = Qubit('00110')
assert qb.flip(0) == Qubit('00111')
assert qb.flip(1) == Qubit('00100')
assert qb.flip(4) == Qubit('10110')
assert qb.qubit_values == (0, 0, 1, 1, 0)
assert qb.dimension == 5
for i in range(5):
assert qb[i] == array[4 - i]
assert len(qb) == 5
qb = Qubit('110')
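# Note (illustrative): Qubit strings index from the right, so for
# Qubit('00110'), qb[0] is the rightmost bit and qb[4] the leftmost, which
# is why the loop above checks qb[i] == array[4 - i].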
def test_QubitBra():
qb = Qubit(0)
qb_bra = QubitBra(0)
assert qb.dual_class() == QubitBra
assert qb_bra.dual_class() == Qubit
qb = Qubit(1, 1, 0)
qb_bra = QubitBra(1, 1, 0)
assert represent(qb, nqubits=3).H == represent(qb_bra, nqubits=3)
qb = Qubit(0, 1)
qb_bra = QubitBra(1,0)
assert qb._eval_innerproduct_QubitBra(qb_bra) == Integer(0)
qb_bra = QubitBra(0, 1)
assert qb._eval_innerproduct_QubitBra(qb_bra) == Integer(1)
def test_IntQubit():
# issue 9136
iqb = IntQubit(0, nqubits=1)
assert qubit_to_matrix(Qubit('0')) == qubit_to_matrix(iqb)
qb = Qubit('1010')
assert qubit_to_matrix(IntQubit(qb)) == qubit_to_matrix(qb)
iqb = IntQubit(1, nqubits=1)
assert qubit_to_matrix(Qubit('1')) == qubit_to_matrix(iqb)
assert qubit_to_matrix(IntQubit(1)) == qubit_to_matrix(iqb)
iqb = IntQubit(7, nqubits=4)
assert qubit_to_matrix(Qubit('0111')) == qubit_to_matrix(iqb)
assert qubit_to_matrix(IntQubit(7, 4)) == qubit_to_matrix(iqb)
iqb = IntQubit(8)
assert iqb.as_int() == 8
assert iqb.qubit_values == (1, 0, 0, 0)
iqb = IntQubit(7, 4)
assert iqb.qubit_values == (0, 1, 1, 1)
assert IntQubit(3) == IntQubit(3, 2)
#test Dual Classes
iqb = IntQubit(3)
iqb_bra = IntQubitBra(3)
assert iqb.dual_class() == IntQubitBra
assert iqb_bra.dual_class() == IntQubit
iqb = IntQubit(5)
iqb_bra = IntQubitBra(5)
assert iqb._eval_innerproduct_IntQubitBra(iqb_bra) == Integer(1)
iqb = IntQubit(4)
iqb_bra = IntQubitBra(5)
assert iqb._eval_innerproduct_IntQubitBra(iqb_bra) == Integer(0)
raises(ValueError, lambda: IntQubit(4, 1))
raises(ValueError, lambda: IntQubit('5'))
raises(ValueError, lambda: IntQubit(5, '5'))
raises(ValueError, lambda: IntQubit(5, nqubits='5'))
raises(TypeError, lambda: IntQubit(5, bad_arg=True))
def test_superposition_of_states():
state = 1/sqrt(2)*Qubit('01') + 1/sqrt(2)*Qubit('10')
state_gate = CNOT(0, 1)*HadamardGate(0)*state
state_expanded = Qubit('01')/2 + Qubit('00')/2 - Qubit('11')/2 + Qubit('10')/2
assert qapply(state_gate).expand() == state_expanded
assert matrix_to_qubit(represent(state_gate, nqubits=2)) == state_expanded
#test apply methods
def test_apply_represent_equality():
gates = [HadamardGate(int(3*random.random())),
XGate(int(3*random.random())), ZGate(int(3*random.random())),
YGate(int(3*random.random())), ZGate(int(3*random.random())),
PhaseGate(int(3*random.random()))]
circuit = Qubit(int(random.random()*2), int(random.random()*2),
int(random.random()*2), int(random.random()*2), int(random.random()*2),
int(random.random()*2))
for i in range(int(random.random()*6)):
circuit = gates[int(random.random()*6)]*circuit
mat = represent(circuit, nqubits=6)
states = qapply(circuit)
state_rep = matrix_to_qubit(mat)
states = states.expand()
state_rep = state_rep.expand()
assert state_rep == states
def test_matrix_to_qubits():
qb = Qubit(0, 0, 0, 0)
mat = Matrix([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
assert matrix_to_qubit(mat) == qb
assert qubit_to_matrix(qb) == mat
state = 2*sqrt(2)*(Qubit(0, 0, 0) + Qubit(0, 0, 1) + Qubit(0, 1, 0) +
Qubit(0, 1, 1) + Qubit(1, 0, 0) + Qubit(1, 0, 1) +
Qubit(1, 1, 0) + Qubit(1, 1, 1))
ones = sqrt(2)*2*Matrix([1, 1, 1, 1, 1, 1, 1, 1])
assert matrix_to_qubit(ones) == state.expand()
assert qubit_to_matrix(state) == ones
def test_measure_normalize():
a, b = symbols('a b')
state = a*Qubit('110') + b*Qubit('111')
assert measure_partial(state, (0,), normalize=False) == \
[(a*Qubit('110'), a*a.conjugate()), (b*Qubit('111'), b*b.conjugate())]
assert measure_all(state, normalize=False) == \
[(Qubit('110'), a*a.conjugate()), (Qubit('111'), b*b.conjugate())]
def test_measure_partial():
#Basic test of collapse of entangled two qubits (Bell States)
state = Qubit('01') + Qubit('10')
assert measure_partial(state, (0,)) == \
[(Qubit('10'), Rational(1, 2)), (Qubit('01'), Rational(1, 2))]
assert measure_partial(state, long(0)) == \
[(Qubit('10'), Rational(1, 2)), (Qubit('01'), Rational(1, 2))]
assert measure_partial(state, (0,)) == \
measure_partial(state, (1,))[::-1]
#Test of more complex collapse and probability calculation
state1 = sqrt(2)/sqrt(3)*Qubit('00001') + 1/sqrt(3)*Qubit('11111')
assert measure_partial(state1, (0,)) == \
[(sqrt(2)/sqrt(3)*Qubit('00001') + 1/sqrt(3)*Qubit('11111'), 1)]
assert measure_partial(state1, (1, 2)) == measure_partial(state1, (3, 4))
assert measure_partial(state1, (1, 2, 3)) == \
[(Qubit('00001'), Rational(2, 3)), (Qubit('11111'), Rational(1, 3))]
#test of measuring multiple bits at once
state2 = Qubit('1111') + Qubit('1101') + Qubit('1011') + Qubit('1000')
assert measure_partial(state2, (0, 1, 3)) == \
[(Qubit('1000'), Rational(1, 4)), (Qubit('1101'), Rational(1, 4)),
(Qubit('1011')/sqrt(2) + Qubit('1111')/sqrt(2), Rational(1, 2))]
assert measure_partial(state2, (0,)) == \
[(Qubit('1000'), Rational(1, 4)),
(Qubit('1111')/sqrt(3) + Qubit('1101')/sqrt(3) +
Qubit('1011')/sqrt(3), Rational(3, 4))]
def test_measure_all():
|
def test_eval_trace():
q1 = Qubit('10110')
q2 = Qubit('01010')
d = Density([q1, 0.6], [q2, 0.4])
t = Tr(d)
assert t.doit() == 1
# extreme bits
t = Tr(d, 0)
assert t.doit() == (0.4*Density([Qubit('0101'), 1]) +
0.6*Density([Qubit('1011'), 1]))
t = Tr(d, 4)
assert t.doit() == (0.4*Density([Qubit('1010'), 1]) +
0.6*Density([Qubit('0110'), 1]))
# index somewhere in between
t = Tr(d, 2)
assert t.doit() == (0.4*Density([Qubit('0110'), 1]) +
0.6*Density([Qubit('1010'), 1]))
#trace all indices
t = Tr(d, [0, 1, 2, 3, 4])
assert t.doit() == 1
# trace some indices, initialized in
# non-canonical order
t = Tr(d, [2, 1, 3])
assert t.doit() == (0.4*Density([Qubit('00'), 1]) +
0.6*Density([Qubit('10'), 1]))
# mixed states
q = (1/sqrt(2)) * (Qubit('00') + Qubit('11'))
d = Density( [q, 1.0] )
t = Tr(d, 0)
assert t.doit() == (0.5*Density([Qubit('0'), 1]) +
0.5*Density([Qubit('1'), 1]))
def test_matrix_to_density():
mat = Matrix([[0, 0], [0, 1]])
assert matrix_to_density(mat) == Density([Qubit('1'), 1])
mat = Matrix([[1, 0], [0, 0]])
assert matrix_to_density(mat) == Density([Qubit('0'), 1])
mat = Matrix([[0, 0], [0, 0]])
assert matrix_to_density(mat) == 0
mat = Matrix([[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 1, 0],
[0, 0, 0, 0]])
assert matrix_to_density(mat) == Density([Qubit('10'), 1])
mat = Matrix([[1, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]])
assert matrix_to_density(mat) == Density([Qubit('00'), 1])
| assert measure_all(Qubit('11')) == [(Qubit('11'), 1)]
state = Qubit('11') + Qubit('10')
assert measure_all(state) == [(Qubit('10'), Rational(1, 2)),
(Qubit('11'), Rational(1, 2))]
state2 = Qubit('11')/sqrt(5) + 2*Qubit('00')/sqrt(5)
assert measure_all(state2) == \
[(Qubit('00'), Rational(4, 5)), (Qubit('11'), Rational(1, 5))]
# from issue #12585
assert measure_all(qapply(Qubit('0'))) == [(Qubit('0'), 1)] |
create_constraint_parameters.go | // Code generated by go-swagger; DO NOT EDIT.
package constraint
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"io"
"net/http"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
"github.com/go-openapi/runtime/middleware"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
"github.com/go-openapi/validate"
"github.com/checkr/flagr/swagger_gen/models"
)
// NewCreateConstraintParams creates a new CreateConstraintParams object
//
// There are no default values defined in the spec.
func NewCreateConstraintParams() CreateConstraintParams {
return CreateConstraintParams{}
}
// CreateConstraintParams contains all the bound params for the create constraint operation
// typically these are obtained from a http.Request
//
// swagger:parameters createConstraint
type CreateConstraintParams struct {
// HTTP Request Object
HTTPRequest *http.Request `json:"-"`
/*create a constraint
Required: true
In: body
*/
Body *models.CreateConstraintRequest
/*numeric ID of the flag
Required: true
Minimum: 1
In: path
*/
FlagID int64
/*numeric ID of the segment
Required: true
Minimum: 1
In: path
*/
SegmentID int64
}
// BindRequest both binds and validates a request; it assumes that complex things implement a Validatable(strfmt.Registry) error interface
// for simple values it will use straight method calls.
//
// To ensure default values, the struct must have been initialized with NewCreateConstraintParams() beforehand.
func (o *CreateConstraintParams) BindRequest(r *http.Request, route *middleware.MatchedRoute) error {
var res []error
o.HTTPRequest = r
if runtime.HasBody(r) {
defer r.Body.Close()
var body models.CreateConstraintRequest
if err := route.Consumer.Consume(r.Body, &body); err != nil {
if err == io.EOF {
res = append(res, errors.Required("body", "body", ""))
} else {
res = append(res, errors.NewParseError("body", "body", "", err))
}
} else {
// validate body object
if err := body.Validate(route.Formats); err != nil {
res = append(res, err)
}
ctx := validate.WithOperationRequest(context.Background())
if err := body.ContextValidate(ctx, route.Formats); err != nil {
res = append(res, err)
}
if len(res) == 0 {
o.Body = &body
}
}
} else {
res = append(res, errors.Required("body", "body", ""))
}
rFlagID, rhkFlagID, _ := route.Params.GetOK("flagID")
if err := o.bindFlagID(rFlagID, rhkFlagID, route.Formats); err != nil {
res = append(res, err)
}
rSegmentID, rhkSegmentID, _ := route.Params.GetOK("segmentID")
if err := o.bindSegmentID(rSegmentID, rhkSegmentID, route.Formats); err != nil {
res = append(res, err)
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
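// Illustrative request (a sketch; the route shape and body fields are
// assumptions based on the parameter spec, not defined in this file):
// binding would populate FlagID=1 and SegmentID=2 from a request like
//
//	POST /flags/1/segments/2/constraints
//	{"property": "state", "operator": "EQ", "value": "CA"}
//
// with the JSON body consumed into models.CreateConstraintRequest.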
// bindFlagID binds and validates parameter FlagID from path.
func (o *CreateConstraintParams) bindFlagID(rawData []string, hasKey bool, formats strfmt.Registry) error {
var raw string
if len(rawData) > 0 {
raw = rawData[len(rawData)-1]
}
// Required: true
// Parameter is provided by construction from the route
value, err := swag.ConvertInt64(raw)
if err != nil {
return errors.InvalidType("flagID", "path", "int64", raw)
}
o.FlagID = value
if err := o.validateFlagID(formats); err != nil {
return err
}
return nil
}
// validateFlagID carries on validations for parameter FlagID
func (o *CreateConstraintParams) validateFlagID(formats strfmt.Registry) error {
if err := validate.MinimumInt("flagID", "path", o.FlagID, 1, false); err != nil {
return err
}
return nil
}
// bindSegmentID binds and validates parameter SegmentID from path.
func (o *CreateConstraintParams) bindSegmentID(rawData []string, hasKey bool, formats strfmt.Registry) error {
var raw string
if len(rawData) > 0 {
raw = rawData[len(rawData)-1]
} | value, err := swag.ConvertInt64(raw)
if err != nil {
return errors.InvalidType("segmentID", "path", "int64", raw)
}
o.SegmentID = value
if err := o.validateSegmentID(formats); err != nil {
return err
}
return nil
}
// validateSegmentID carries on validations for parameter SegmentID
func (o *CreateConstraintParams) validateSegmentID(formats strfmt.Registry) error {
if err := validate.MinimumInt("segmentID", "path", o.SegmentID, 1, false); err != nil {
return err
}
return nil
} |
// Required: true
// Parameter is provided by construction from the route
|
colorer.go | package views
import (
"strings"
"github.com/derailed/k9s/resource"
"github.com/gdamore/tcell"
"k8s.io/apimachinery/pkg/watch"
)
const (
modColor = tcell.ColorGreenYellow
addColor = tcell.ColorLightSkyBlue
errColor = tcell.ColorOrangeRed
stdColor = tcell.ColorWhite
highlightColor = tcell.ColorAqua
killColor = tcell.ColorMediumPurple
)
func defaultColorer(ns string, r *resource.RowEvent) tcell.Color {
c := stdColor
switch r.Action {
case watch.Added:
c = addColor
case watch.Modified:
c = modColor
}
return c
}
func podColorer(ns string, r *resource.RowEvent) tcell.Color {
c := defaultColorer(ns, r)
statusCol := 3
if len(ns) != 0 {
statusCol = 2
}
switch strings.TrimSpace(r.Fields[statusCol]) {
case "Running", "Initialized", "Completed", "Terminating":
default:
c = errColor
}
readyCol := 2
if len(ns) != 0 {
readyCol = 1
}
tokens := strings.Split(strings.TrimSpace(r.Fields[readyCol]), "/")
if len(tokens) == 2 && (tokens[0] == "0" || tokens[0] != tokens[1]) {
if strings.TrimSpace(r.Fields[statusCol]) != "Completed" {
c = errColor
}
}
return c
}
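// Example (illustrative): with a namespace set, statusCol is 2 and readyCol
// is 1, so a row with Fields ["web-0", "0/1", "CrashLoopBackOff", ...] is
// colored errColor: the status is outside the allowed set and the ready
// count "0/1" has fewer ready containers than desired.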
func ctxColorer(ns string, r *resource.RowEvent) tcell.Color {
c := defaultColorer(ns, r)
if r.Action == watch.Added || r.Action == watch.Modified {
return c
}
if strings.Contains(strings.TrimSpace(r.Fields[0]), "*") {
c = highlightColor
}
return c
}
func pvColorer(ns string, r *resource.RowEvent) tcell.Color {
c := defaultColorer(ns, r)
if r.Action == watch.Added || r.Action == watch.Modified {
return c
}
if strings.TrimSpace(r.Fields[4]) != "Bound" {
return errColor
}
return stdColor
}
func pvcColorer(ns string, r *resource.RowEvent) tcell.Color {
c := defaultColorer(ns, r)
if r.Action == watch.Added || r.Action == watch.Modified {
return c
}
markCol := 2
if ns != resource.AllNamespaces {
markCol = 1
}
if strings.TrimSpace(r.Fields[markCol]) != "Bound" {
c = errColor
}
return c
}
func dpColorer(ns string, r *resource.RowEvent) tcell.Color {
c := defaultColorer(ns, r)
if r.Action == watch.Added || r.Action == watch.Modified {
return c
}
markCol := 2
if ns != resource.AllNamespaces {
markCol = 1
}
if strings.TrimSpace(r.Fields[markCol]) != strings.TrimSpace(r.Fields[markCol+1]) {
return errColor
}
return stdColor
}
func stsColorer(ns string, r *resource.RowEvent) tcell.Color {
c := defaultColorer(ns, r)
if r.Action == watch.Added || r.Action == watch.Modified {
return c
}
markCol := 2
if ns != resource.AllNamespaces {
markCol = 1
}
if strings.TrimSpace(r.Fields[markCol]) != strings.TrimSpace(r.Fields[markCol+1]) {
return errColor
}
return stdColor
}
func rsColorer(ns string, r *resource.RowEvent) tcell.Color |
func evColorer(ns string, r *resource.RowEvent) tcell.Color {
c := defaultColorer(ns, r)
markCol := 3
if ns != resource.AllNamespaces {
markCol = 2
}
switch strings.TrimSpace(r.Fields[markCol]) {
case "Failed":
c = errColor
case "Killing":
c = killColor
}
return c
}
func nsColorer(ns string, r *resource.RowEvent) tcell.Color {
c := defaultColorer(ns, r)
if r.Action == watch.Added || r.Action == watch.Modified {
return c
}
switch strings.TrimSpace(r.Fields[1]) {
case "Inactive", "Terminating":
c = errColor
}
if strings.Contains(strings.TrimSpace(r.Fields[0]), "*") {
c = highlightColor
}
return c
}
| {
c := defaultColorer(ns, r)
if r.Action == watch.Added || r.Action == watch.Modified {
return c
}
markCol := 2
if ns != resource.AllNamespaces {
markCol = 1
}
if strings.TrimSpace(r.Fields[markCol]) != strings.TrimSpace(r.Fields[markCol+1]) {
return errColor
}
return stdColor
} |
Tooltip.d.ts | import * as React from 'react';
import * as Popper from 'popper.js';
import { CSSModule } from '../index';
export type UncontrolledProps<T = {}> = React.HTMLAttributes<HTMLElement> & {
target: string | HTMLElement;
container?: string | HTMLElement;
delay?: number | {show: number, hide: number};
className?: string;
innerClassName?: string;
autohide?: boolean;
placement?: Popper.Placement;
modifiers?: Popper.Modifiers;
cssModule?: CSSModule;
fade?: boolean;
flip?: boolean;
} & T;
export type UncontrolledTooltipProps<T = {}> = UncontrolledProps<T>;
export type TooltipProps<T = {}> = UncontrolledTooltipProps<T> & {
toggle?: () => void;
isOpen?: boolean;
};
declare class Tooltip<T> extends React.Component<TooltipProps<T>> {} | export default Tooltip; |
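// Usage sketch (illustrative; the target id and handlers are app-side
// assumptions): a controlled tooltip supplies isOpen/toggle, while
// UncontrolledTooltip manages that state itself.
//
//   <Tooltip placement="top" target="save-btn" isOpen={open} toggle={toggle}>
//     Save your changes
//   </Tooltip>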
|
Theme.tsx | import { css, Global } from '@emotion/react';
export const Theme = () => {
return (
<Global
styles={css`
* {
box-sizing: border-box;
}
:root {
--base-background: #f4f4f9;
--base-foreground: rgb(32, 32, 32);
--header-background: #383f51ff;
--header-foreground: #f4f4f9;
--header-active: #ce8147ff;
--primary-background: #3c4f76ff;
--primary-background-medium: #3c4f7677;
--primary-background-light: #3c4f7644;
--primary-background-lightest: #3c4f7611;
--primary-foreground: #f4f4f9;
--primary-active: #ce8147ff;
--primary-outline: #dddbf1ff;
--primary-ripple: #3c4f76cc;
--menu-active: rgba(0, 0, 0, 0.04);
--font-family: 'Poppins', sans-serif;
--secondary-font-family: 'Open Sans', sans-serif;
--surface-background: rgba(255, 255, 255, 1);
--surface-foreground: #212121ff;
--surface-foreground-medium: #212121aa;
--surface-outline: var(--primary-outline);
--well-background: #e4e6e7ff;
--well-foreground: #595a5aff;
--well-outline: #e4e6e7ff;
--error-background: #b00020;
--error-foreground: #ffffff;
--error-outline: #b00020;
--warning-background: #fab005;
--warning-foreground: #ffffff;
--warning-outline: #fab005;
--success-background: #23b818;
--success-foreground: #ffffff;
--success-outline: #23b818;
}
button,
a.button {
font-family: var(--button-font-family, var(--font-family));
font-size: 1rem;
font-weight: normal;
outline: none;
background: var(--surface-background);
border: 1px solid transparent;
border: 1px solid var(--primary-background-light);
color: var(--primary-background);
padding: 0.5rem;
border-radius: 0.25rem;
min-width: 5rem;
user-select: none;
cursor: pointer;
background-position: center;
transition: box-shadow 200ms ease-in-out, background 0.8s,
color 200ms ease-in-out, border-color 320ms ease-in-out;
&:disabled {
cursor: auto;
}
&.primary {
background-color: var(--primary-background);
border-color: var(--primary-background);
color: var(--primary-foreground);
&:hover {
background: var(--primary-background)
radial-gradient(
circle,
transparent 1%,
var(--primary-background) 1%
)
center/15000%;
}
&:active {
background-color: var(--primary-ripple);
background-size: 100%;
transition: background-color 0s, background-size 0s;
}
}
&:hover {
border-color: var(--primary-background);
background: var(--primary-background-lightest)
radial-gradient(
circle,
transparent 1%,
var(--primary-background-lightest) 1%
)
center/15000%;
}
&:active {
background-color: var(--surface-background);
background-size: 100%;
transition: background-color 0s, background-size 0s;
}
}
button.link,
a.button.link {
background-color: transparent;
min-height: 2rem;
border-color: transparent;
&:hover {
color: var(--base-foreground);
box-shadow: none;
background: rgba(0, 0, 0, 0.05)
radial-gradient(circle, transparent 1%, rgba(0, 0, 0, 0.05) 1%)
center/18000%;
}
&:active {
background-color: var(--surface-background);
background-size: 100%;
transition: background-color 0s, background-size 0s;
}
}
.surface {
border-radius: 2px;
background-color: var(--surface-background);
color: var(--surface-foreground);
}
*[class^='surface-level-'],
*[class*=' surface-level-'] {
transition: box-shadow 500ms ease-in-out;
}
.surface-level-1 {
box-shadow: 0 1px 3px rgba(0, 0, 0, 0.12),
0 1px 2px rgba(0, 0, 0, 0.24);
}
.surface-hover.surface-level-1:hover {
box-shadow: 0 3px 5px rgba(0, 0, 0, 0.12),
0 3px 4px rgba(0, 0, 0, 0.24);
}
.surface-level-2 {
box-shadow: 0 3px 6px rgba(0, 0, 0, 0.16),
0 3px 6px rgba(0, 0, 0, 0.23);
}
.surface-level-3 {
box-shadow: 0 10px 20px rgba(0, 0, 0, 0.19),
0 6px 6px rgba(0, 0, 0, 0.23);
}
.surface-level-4 {
box-shadow: 0 14px 28px rgba(0, 0, 0, 0.25),
0 10px 10px rgba(0, 0, 0, 0.22);
}
.surface-level-5 {
box-shadow: 0 19px 38px rgba(0, 0, 0, 0.3),
0 15px 12px rgba(0, 0, 0, 0.22);
}
body,
html {
padding: 0;
margin: 0;
height: 100%;
width: 100%;
font-family: var(--font-family);
color: var(--base-foreground);
background-color: var(--base-background);
font-size: 16px;
}
#root {
height: 100vh;
display: flex;
flex-direction: column;
}
a,
a:hover,
a:visited,
a:active {
color: #008bf5ff;
text-decoration: none;
transition: color 200ms ease-in-out;
} | }
`}
/>
);
}; |
a:hover {
color: #005ca3ff; |
main.rs | // Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#![feature(futures_api, await_macro, async_await)]
// Macros used to serialize bonding data FIDL types for persistent storage.
#[macro_use]
extern crate serde_derive;
use {
failure::{Error, ResultExt},
fidl::endpoints::ServiceMarker,
fidl_fuchsia_bluetooth_bredr::ProfileMarker,
fidl_fuchsia_bluetooth_control::ControlRequestStream,
fidl_fuchsia_bluetooth_gatt::Server_Marker,
fidl_fuchsia_bluetooth_le::{CentralMarker, PeripheralMarker},
fuchsia_async as fasync,
fuchsia_bluetooth::util,
fuchsia_component::server::ServiceFs,
fuchsia_syslog::{self as syslog, fx_log_err, fx_log_info, fx_log_warn},
futures::{FutureExt, StreamExt, TryFutureExt, TryStreamExt},
};
use crate::{
adapters::{AdapterEvent::*, *},
host_dispatcher::{HostService::*, *},
};
mod services;
mod store;
mod adapters;
mod host_device;
mod host_dispatcher;
const BT_GAP_COMPONENT_ID: &'static str = "bt-gap";
fn main() -> Result<(), Error> {
syslog::init_with_tags(&["bt-gap"]).expect("Can't init logger");
fx_log_info!("Starting bt-gap...");
let result = run().context("Error running BT-GAP");
if let Err(e) = &result {
fx_log_err!("{:?}", e)
};
Ok(result?)
}
fn run() -> Result<(), Error> |
fn control_service(hd: HostDispatcher, stream: ControlRequestStream) {
fx_log_info!("Spawning Control Service");
fasync::spawn(
services::start_control_service(hd.clone(), stream)
.unwrap_or_else(|e| eprintln!("Failed to spawn {:?}", e)),
)
}
| {
let mut executor = fasync::Executor::new().context("Error creating executor")?;
let stash = executor
.run_singlethreaded(store::stash::init_stash(BT_GAP_COMPONENT_ID))
.context("Error initializing Stash service")?;
let hd = HostDispatcher::new(stash);
let watch_hd = hd.clone();
let central_hd = hd.clone();
let control_hd = hd.clone();
let peripheral_hd = hd.clone();
let profile_hd = hd.clone();
let gatt_hd = hd.clone();
let host_watcher = watch_hosts().try_for_each(move |msg| {
let hd = watch_hd.clone();
async {
match msg {
AdapterAdded(device_path) => {
let result = await!(hd.add_adapter(&device_path));
if let Err(e) = &result {
fx_log_warn!("Error adding bt-host device '{:?}': {:?}", device_path, e);
}
result
}
AdapterRemoved(device_path) => {
hd.rm_adapter(&device_path);
Ok(())
}
}
}
});
let mut fs = ServiceFs::new();
fs.dir("public")
.add_fidl_service(move |s| control_service(control_hd.clone(), s))
.add_service_at(CentralMarker::NAME, move |chan| {
if let Ok(chan) = fasync::Channel::from_channel(chan) {
fx_log_info!("Connecting CentralService to Adapter");
fasync::spawn(central_hd.clone().request_host_service(chan, LeCentral));
}
None
})
.add_service_at(PeripheralMarker::NAME, move |chan| {
if let Ok(chan) = fasync::Channel::from_channel(chan) {
fx_log_info!("Connecting Peripheral Service to Adapter");
fasync::spawn(peripheral_hd.clone().request_host_service(chan, LePeripheral));
}
None
})
.add_service_at(ProfileMarker::NAME, move |chan| {
if let Ok(chan) = fasync::Channel::from_channel(chan) {
fx_log_info!("Connecting Profile Service to Adapter");
fasync::spawn(profile_hd.clone().request_host_service(chan, Profile));
}
None
})
.add_service_at(Server_Marker::NAME, move |chan| {
if let Ok(chan) = fasync::Channel::from_channel(chan) {
fx_log_info!("Connecting Gatt Service to Adapter");
fasync::spawn(gatt_hd.clone().request_host_service(chan, LeGatt));
}
None
});
fs.take_and_serve_directory_handle()?;
executor.run_singlethreaded(fs.collect::<()>().map(Ok).try_join(host_watcher))
.map(|((), ())| ())
} |
cli.rs | //! Command-line interface to this crate's functionality
use clap::{self, crate_authors, crate_version, App, Arg, SubCommand};
// duplicated here, because when using this `cli` module in build.rs
// to generate shell completions, there is no `lpc55` crate yet
pub const KEYSTORE_KEY_NAMES: [&'static str; 6] = [
"secure-boot-kek",
"user-key",
"unique-device-secret",
"prince-region-0",
"prince-region-1",
"prince-region-2",
];
const ABOUT: &str = "
lpc55 offers various host-side utilities.
Use -h for short descriptions and --help for more details
Project homepage: https://github.com/lpc55/lpc55-host
";
pub fn app() -> clap::App<'static, 'static> {
// We need to specify our version in a static because we've painted clap
// into a corner. We've told it that every string we give it will be
// 'static, but we need to build the version string dynamically. We can
// fake the 'static lifetime with lazy_static.
lazy_static::lazy_static! {
static ref LONG_VERSION: String = long_version(None);
// static ref LONG_VERSION: String = long_version(Some("47e1f"));
}
let app = App::new("lpc55")
.author(crate_authors!())
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.about(ABOUT)
.help_message("Prints help information. Use --help for more details.")
.setting(clap::AppSettings::SubcommandRequiredElseHelp)
.arg(Arg::with_name("VID")
.long("vid")
.default_value("0x1fc9")
.help("VID of bootloader (hex)")
// even without this, `cmd -v subcommand` passes -v flag to subcommand's matches
// the difference is that now the parser allows user to `cmd subcommand -v`
.global(true)
)
.arg(Arg::with_name("PID")
.long("pid")
.default_value("0x0021")
.help("PID of bootloader (hex)")
// even without this, `cmd -v subcommand` passes -v flag to subcommand's matches
// the difference is that now the parser allows user to `cmd subcommand -v`
.global(true)
)
.arg(Arg::with_name("v")
.short("v")
.long("verbose")
.multiple(true)
.global(true)
.help("Sets the level of verbosity (use multiple times to increase: -v = INFO, -vv = DEBUG, -vvv = TRACE)"))
.subcommand(SubCommand::with_name("http")
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.visible_alias("h")
.about("Serve http API to bootloader connector")
.arg(Arg::with_name("ADDR")
.help("Address to bind to")
.long("addr")
.default_value("127.0.0.1")
)
.arg(Arg::with_name("PORT")
.help("Port to listen on")
.long("port")
.default_value("2020")
)
)
.subcommand(SubCommand::with_name("configure")
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.about("configure factory and customer settings")
.setting(clap::AppSettings::SubcommandRequiredElseHelp)
.subcommand(SubCommand::with_name("factory-settings")
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.about("make changes to factory settings page (CMPA)")
.arg(Arg::with_name("OUTPUT")
.short("o")
.long("output")
.value_name("OUTPUT")
.help("Output factory settings (CMPA) to a 512-byte file instead of writing to device.")
.required(false)
)
.arg(Arg::with_name("CONFIG")
.help("Configuration file containing settings")
.required(true)
)
)
.subcommand(SubCommand::with_name("customer-settings")
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.about("make changes to customer settings page (CFPA)")
.arg(Arg::with_name("OUTPUT")
.short("o")
.long("output")
.value_name("OUTPUT")
.help("Output customer settings (CFPA) to a 512-byte file instead of writing to device.")
.required(false)
)
.arg(Arg::with_name("CONFIG")
.help("Configuration file containing settings")
.required(true)
)
)
)
.subcommand(SubCommand::with_name("reboot")
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.about("reboot device")
)
.subcommand(SubCommand::with_name("keystore")
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.about("keystore interactions")
.setting(clap::AppSettings::SubcommandRequiredElseHelp)
.subcommand(SubCommand::with_name("enroll-puf")
.version(crate_version!())
.about("(re)initialize PUF, writing an activation code to the keystore")
)
.subcommand(SubCommand::with_name("read")
.version(crate_version!())
)
.subcommand(SubCommand::with_name("generate-key")
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.about("generate \"intrinsic\" key")
.arg(Arg::with_name("KEY")
.help("name of key code")
.required(true)
.possible_values(&KEYSTORE_KEY_NAMES)
)
.arg(Arg::with_name("LENGTH")
.help("length in bytes of key to be generated") // (typical values 16 or 32)")
.required(true)
// more are possible, but let's make things easy for ourselves
.possible_values(&[
"16",
"32",
])
)
)
.subcommand(SubCommand::with_name("set-key")
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.about("set key")
.arg(Arg::with_name("KEY")
.help("name of key code")
.required(true)
.possible_values(&KEYSTORE_KEY_NAMES)
)
.arg(Arg::with_name("KEYDATA_FILENAME")
.help("filename of file containing the raw key data bytes")
.required(true))
)
.subcommand(SubCommand::with_name("write-keys")
.version(crate_version!())
.about("store any previously generated keys (including PUF activation codes) to non-volatile memory, i.e., PFR keystore")
)
.subcommand(SubCommand::with_name("read-keys")
.version(crate_version!())
.about("ReadNonVolatile")
)
)
.subcommand(SubCommand::with_name("info")
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.visible_alias("i")
.about("query all properties from bootloader")
)
.subcommand(SubCommand::with_name("pfr")
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.about("read out and parse PFR")
.arg(Arg::with_name("FORMAT")
.help("Format to output the parsed PFR")
.long("format")
.default_value("json")
.possible_values(&[
"native",
"alt-native",
"json",
"json-pretty",
"raw",
"yaml",
"toml",
])
)
)
.subcommand(SubCommand::with_name("read-memory")
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.visible_aliases(&["r", "read"])
.about("read out memory")
.arg(Arg::with_name("ADDRESS")
.help("Address to start reading from")
.required(true))
.arg(Arg::with_name("LENGTH")
.help("Number of bytes to read")
.required(true))
.arg(Arg::with_name("OUTPUT")
.help("Sets the output file to use. If missing, hex-dumps to stdout.")
.short("o")
.long("output-file")
.takes_value(true))
)
.subcommand(SubCommand::with_name("receive-sb-file")
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.about("send SB2.1 file to target")
.arg(Arg::with_name("SB-FILE")
.help("Configuration file")
.required(true))
)
.subcommand(SubCommand::with_name("fingerprint-certificates")
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.about("calculate fingerprint of root certificates (aka ROTKH)")
.arg(Arg::with_name("CONFIG")
.help("Configuration file")
.required(true))
)
.subcommand(SubCommand::with_name("sign-fw")
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.about("sign firmware")
.arg(Arg::with_name("CONFIG")
.help("Configuration file")
.required(true))
)
.subcommand(SubCommand::with_name("assemble-sb")
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.about("assemble SB2.1 image")
.arg(Arg::with_name("CONFIG")
.help("Configuration file")
.required(true))
)
.subcommand(SubCommand::with_name("sb")
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.about("firmware commands")
.subcommand(SubCommand::with_name("show")
.version(crate_version!())
.long_version(LONG_VERSION.as_str())
.about("show information about file")
.arg(Arg::with_name("FILE")
.help("file to show")
.required(true))
)
)
;
app
}
/// Return the "long" format of lpc55's version string.
///
/// If a revision hash is given, then it is used. If one isn't given, then
/// the LPC55_BUILD_GIT_HASH env var is inspected for it. If that isn't set,
/// then a revision hash is not included in the version string returned.
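///
/// Illustrative sketch (the version prefix comes from `crate_version!()`):
///
/// ```ignore
/// assert!(long_version(Some("deadbeef")).ends_with("(rev deadbeef)"));
/// ```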
pub fn long_version(revision_hash: Option<&str>) -> String | {
// Do we have a git hash?
// (Yes, if lpc55 was built on a machine with `git` installed.)
let hash = match revision_hash.or(option_env!("LPC55_BUILD_GIT_HASH")) {
None => String::new(),
Some(githash) => format!(" (rev {})", githash),
};
format!("{}{}", crate_version!(), hash)
} |
|
JoplinSettings.ts | import Setting, { SettingItem as InternalSettingItem } from 'lib/models/Setting';
import Plugin from '../Plugin';
import { SettingItem, SettingSection } from './types';
/**
* This API allows registering new settings and setting sections, as well as getting and setting settings. Once a setting has been registered it will appear in the config screen and be editable by the user.
*
* Settings are essentially key/value pairs.
*
 * Note: Currently this API does **not** provide access to Joplin's built-in settings. This is by design, as plugins that modify user settings could give unexpected results.
*
* [View the demo plugin](https://github.com/laurent22/joplin/tree/dev/CliClient/tests/support/plugins/settings)
*/
export default class JoplinSettings {
private plugin_:Plugin = null;
constructor(plugin: Plugin) {
this.plugin_ = plugin;
}
// Ensures that the plugin settings and sections are within their own namespace, to prevent them from
// overwriting other plugin settings or the default settings.
private namespacedKey(key:string):string {
return `plugin-${this.plugin_.id}.${key}`;
}
/**
	 * Registers a new setting. Note that setting registrations are dynamic and do not survive a restart,
	 * so the setting must be registered every time the plugin starts (for example in the onStart event).
	 * The setting value, however, is preserved from one launch to the next, so it will not be lost even
	 * if the plugin fails to start at some point.
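	 *
	 * Illustrative sketch (the numeric `type` value and the top-level `joplin` object are assumptions here):
	 *
	 * ```typescript
	 * await joplin.settings.registerSetting('myKey', {
	 *     value: 'default',
	 *     type: 2, // assumed to be the string setting type
	 *     public: true,
	 *     label: 'My setting',
	 * });
	 * ```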
*/
async registerSetting(key:string, settingItem:SettingItem) {
const internalSettingItem:InternalSettingItem = {
key: key, | type: settingItem.type,
public: settingItem.public,
label: () => settingItem.label,
description: (_appType:string) => settingItem.description,
};
if ('isEnum' in settingItem) internalSettingItem.isEnum = settingItem.isEnum;
if ('section' in settingItem) internalSettingItem.section = this.namespacedKey(settingItem.section);
if ('options' in settingItem) internalSettingItem.options = settingItem.options;
if ('appTypes' in settingItem) internalSettingItem.appTypes = settingItem.appTypes;
if ('secure' in settingItem) internalSettingItem.secure = settingItem.secure;
if ('advanced' in settingItem) internalSettingItem.advanced = settingItem.advanced;
if ('minimum' in settingItem) internalSettingItem.minimum = settingItem.minimum;
if ('maximum' in settingItem) internalSettingItem.maximum = settingItem.maximum;
if ('step' in settingItem) internalSettingItem.step = settingItem.step;
return Setting.registerSetting(this.namespacedKey(key), internalSettingItem);
}
/**
	 * Registers a new setting section. As with registerSetting, registration is dynamic and must be repeated every time the plugin starts.
*/
async registerSection(name:string, section:SettingSection) {
return Setting.registerSection(this.namespacedKey(name), section);
}
/**
	 * Gets a setting value (only applies to settings you registered from your plugin)
*/
async value(key:string):Promise<any> {
return Setting.value(this.namespacedKey(key));
}
/**
	 * Sets a setting value (only applies to settings you registered from your plugin)
*/
async setValue(key:string, value:any) {
return Setting.setValue(this.namespacedKey(key), value);
}
/**
* Gets a global setting value, including app-specific settings and those set by other plugins.
*
* The list of available settings is not documented yet, but can be found by looking at the source code:
*
* https://github.com/laurent22/joplin/blob/3539a452a359162c461d2849829d2d42973eab50/ReactNativeClient/lib/models/Setting.ts#L142
*/
async globalValue(key:string):Promise<any> {
return Setting.value(key);
}
} | value: settingItem.value, |
outputs.py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'PrivateEndpointConnectionResponse',
'PrivateEndpointResponse',
'PrivateLinkServiceConnectionStateResponse',
'ServiceAccessPolicyEntryResponse',
'ServiceAuthenticationConfigurationInfoResponse',
'ServiceCorsConfigurationInfoResponse',
'ServiceCosmosDbConfigurationInfoResponse',
'ServiceExportConfigurationInfoResponse',
'ServicesPropertiesResponse',
'ServicesResourceResponseIdentity',
]
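# Illustrative note (not part of the generated module): these output types are
# what resource reads return; the function and argument names below are
# assumptions for the sketch.
#
#   svc = get_service(resource_group_name="rg", resource_name="my-fhir")
#   print(svc.properties.provisioning_state)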
@pulumi.output_type
class PrivateEndpointConnectionResponse(dict):
"""
The Private Endpoint Connection resource.
"""
def __init__(__self__, *,
id: str,
name: str,
private_link_service_connection_state: 'outputs.PrivateLinkServiceConnectionStateResponse',
provisioning_state: str,
type: str,
private_endpoint: Optional['outputs.PrivateEndpointResponse'] = None):
"""
The Private Endpoint Connection resource.
:param str id: Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
:param str name: The name of the resource
:param 'PrivateLinkServiceConnectionStateResponseArgs' private_link_service_connection_state: A collection of information about the state of the connection between service consumer and provider.
:param str provisioning_state: The provisioning state of the private endpoint connection resource.
:param str type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
:param 'PrivateEndpointResponseArgs' private_endpoint: The private endpoint resource.
"""
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "private_link_service_connection_state", private_link_service_connection_state)
pulumi.set(__self__, "provisioning_state", provisioning_state)
pulumi.set(__self__, "type", type)
if private_endpoint is not None:
pulumi.set(__self__, "private_endpoint", private_endpoint)
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="privateLinkServiceConnectionState")
def private_link_service_connection_state(self) -> 'outputs.PrivateLinkServiceConnectionStateResponse':
"""
A collection of information about the state of the connection between service consumer and provider.
"""
return pulumi.get(self, "private_link_service_connection_state")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The provisioning state of the private endpoint connection resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="privateEndpoint")
def private_endpoint(self) -> Optional['outputs.PrivateEndpointResponse']:
"""
The private endpoint resource.
"""
return pulumi.get(self, "private_endpoint")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PrivateEndpointResponse(dict):
"""
The Private Endpoint resource.
"""
def __init__(__self__, *,
id: str):
"""
The Private Endpoint resource.
:param str id: The ARM identifier for Private Endpoint
"""
pulumi.set(__self__, "id", id)
@property
@pulumi.getter
def id(self) -> str:
"""
The ARM identifier for Private Endpoint
"""
return pulumi.get(self, "id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PrivateLinkServiceConnectionStateResponse(dict):
"""
A collection of information about the state of the connection between service consumer and provider.
"""
def __init__(__self__, *,
actions_required: Optional[str] = None,
description: Optional[str] = None,
status: Optional[str] = None):
"""
A collection of information about the state of the connection between service consumer and provider.
:param str actions_required: A message indicating if changes on the service provider require any updates on the consumer.
:param str description: The reason for approval/rejection of the connection.
:param str status: Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
"""
if actions_required is not None:
pulumi.set(__self__, "actions_required", actions_required)
if description is not None:
pulumi.set(__self__, "description", description)
if status is not None:
pulumi.set(__self__, "status", status)
@property
@pulumi.getter(name="actionsRequired")
def actions_required(self) -> Optional[str]:
"""
A message indicating if changes on the service provider require any updates on the consumer.
"""
return pulumi.get(self, "actions_required")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
The reason for approval/rejection of the connection.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def status(self) -> Optional[str]:
"""
Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
"""
return pulumi.get(self, "status")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServiceAccessPolicyEntryResponse(dict):
"""
An access policy entry.
"""
def __init__(__self__, *,
object_id: str):
"""
An access policy entry.
:param str object_id: An Azure AD object ID (User or Apps) that is allowed access to the FHIR service.
"""
pulumi.set(__self__, "object_id", object_id)
@property
@pulumi.getter(name="objectId")
def object_id(self) -> str:
"""
An Azure AD object ID (User or Apps) that is allowed access to the FHIR service.
"""
return pulumi.get(self, "object_id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServiceAuthenticationConfigurationInfoResponse(dict):
"""
Authentication configuration information
"""
def __init__(__self__, *,
audience: Optional[str] = None,
authority: Optional[str] = None,
smart_proxy_enabled: Optional[bool] = None):
"""
Authentication configuration information
:param str audience: The audience url for the service
:param str authority: The authority url for the service
:param bool smart_proxy_enabled: If the SMART on FHIR proxy is enabled
"""
if audience is not None:
pulumi.set(__self__, "audience", audience)
if authority is not None:
pulumi.set(__self__, "authority", authority)
if smart_proxy_enabled is not None:
pulumi.set(__self__, "smart_proxy_enabled", smart_proxy_enabled)
@property
@pulumi.getter
def audience(self) -> Optional[str]:
"""
The audience url for the service
"""
return pulumi.get(self, "audience")
@property
@pulumi.getter
def authority(self) -> Optional[str]:
"""
The authority url for the service
"""
return pulumi.get(self, "authority")
@property
@pulumi.getter(name="smartProxyEnabled")
def smart_proxy_enabled(self) -> Optional[bool]:
"""
If the SMART on FHIR proxy is enabled
"""
return pulumi.get(self, "smart_proxy_enabled")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServiceCorsConfigurationInfoResponse(dict):
"""
The settings for the CORS configuration of the service instance.
"""
def __init__(__self__, *,
allow_credentials: Optional[bool] = None,
headers: Optional[Sequence[str]] = None,
max_age: Optional[int] = None,
methods: Optional[Sequence[str]] = None,
origins: Optional[Sequence[str]] = None):
"""
The settings for the CORS configuration of the service instance.
:param bool allow_credentials: If credentials are allowed via CORS.
:param Sequence[str] headers: The headers to be allowed via CORS.
:param int max_age: The max age to be allowed via CORS.
:param Sequence[str] methods: The methods to be allowed via CORS.
:param Sequence[str] origins: The origins to be allowed via CORS.
"""
if allow_credentials is not None:
pulumi.set(__self__, "allow_credentials", allow_credentials)
if headers is not None:
pulumi.set(__self__, "headers", headers)
if max_age is not None: | pulumi.set(__self__, "methods", methods)
if origins is not None:
pulumi.set(__self__, "origins", origins)
@property
@pulumi.getter(name="allowCredentials")
def allow_credentials(self) -> Optional[bool]:
"""
If credentials are allowed via CORS.
"""
return pulumi.get(self, "allow_credentials")
@property
@pulumi.getter
def headers(self) -> Optional[Sequence[str]]:
"""
The headers to be allowed via CORS.
"""
return pulumi.get(self, "headers")
@property
@pulumi.getter(name="maxAge")
def max_age(self) -> Optional[int]:
"""
The max age to be allowed via CORS.
"""
return pulumi.get(self, "max_age")
@property
@pulumi.getter
def methods(self) -> Optional[Sequence[str]]:
"""
The methods to be allowed via CORS.
"""
return pulumi.get(self, "methods")
@property
@pulumi.getter
def origins(self) -> Optional[Sequence[str]]:
"""
The origins to be allowed via CORS.
"""
return pulumi.get(self, "origins")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServiceCosmosDbConfigurationInfoResponse(dict):
"""
The settings for the Cosmos DB database backing the service.
"""
def __init__(__self__, *,
key_vault_key_uri: Optional[str] = None,
offer_throughput: Optional[int] = None):
"""
The settings for the Cosmos DB database backing the service.
:param str key_vault_key_uri: The URI of the customer-managed key for the backing database.
:param int offer_throughput: The provisioned throughput for the backing database.
"""
if key_vault_key_uri is not None:
pulumi.set(__self__, "key_vault_key_uri", key_vault_key_uri)
if offer_throughput is not None:
pulumi.set(__self__, "offer_throughput", offer_throughput)
@property
@pulumi.getter(name="keyVaultKeyUri")
def key_vault_key_uri(self) -> Optional[str]:
"""
The URI of the customer-managed key for the backing database.
"""
return pulumi.get(self, "key_vault_key_uri")
@property
@pulumi.getter(name="offerThroughput")
def offer_throughput(self) -> Optional[int]:
"""
The provisioned throughput for the backing database.
"""
return pulumi.get(self, "offer_throughput")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServiceExportConfigurationInfoResponse(dict):
"""
Export operation configuration information
"""
def __init__(__self__, *,
storage_account_name: Optional[str] = None):
"""
Export operation configuration information
:param str storage_account_name: The name of the default export storage account.
"""
if storage_account_name is not None:
pulumi.set(__self__, "storage_account_name", storage_account_name)
@property
@pulumi.getter(name="storageAccountName")
def storage_account_name(self) -> Optional[str]:
"""
The name of the default export storage account.
"""
return pulumi.get(self, "storage_account_name")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServicesPropertiesResponse(dict):
"""
The properties of a service instance.
"""
def __init__(__self__, *,
provisioning_state: str,
access_policies: Optional[Sequence['outputs.ServiceAccessPolicyEntryResponse']] = None,
authentication_configuration: Optional['outputs.ServiceAuthenticationConfigurationInfoResponse'] = None,
cors_configuration: Optional['outputs.ServiceCorsConfigurationInfoResponse'] = None,
cosmos_db_configuration: Optional['outputs.ServiceCosmosDbConfigurationInfoResponse'] = None,
export_configuration: Optional['outputs.ServiceExportConfigurationInfoResponse'] = None,
private_endpoint_connections: Optional[Sequence['outputs.PrivateEndpointConnectionResponse']] = None,
public_network_access: Optional[str] = None):
"""
The properties of a service instance.
:param str provisioning_state: The provisioning state.
:param Sequence['ServiceAccessPolicyEntryResponseArgs'] access_policies: The access policies of the service instance.
:param 'ServiceAuthenticationConfigurationInfoResponseArgs' authentication_configuration: The authentication configuration for the service instance.
:param 'ServiceCorsConfigurationInfoResponseArgs' cors_configuration: The settings for the CORS configuration of the service instance.
:param 'ServiceCosmosDbConfigurationInfoResponseArgs' cosmos_db_configuration: The settings for the Cosmos DB database backing the service.
:param 'ServiceExportConfigurationInfoResponseArgs' export_configuration: The settings for the export operation of the service instance.
:param Sequence['PrivateEndpointConnectionResponseArgs'] private_endpoint_connections: The list of private endpoint connections that are set up for this resource.
:param str public_network_access: Control permission for data plane traffic coming from public networks while private endpoint is enabled.
"""
pulumi.set(__self__, "provisioning_state", provisioning_state)
if access_policies is not None:
pulumi.set(__self__, "access_policies", access_policies)
if authentication_configuration is not None:
pulumi.set(__self__, "authentication_configuration", authentication_configuration)
if cors_configuration is not None:
pulumi.set(__self__, "cors_configuration", cors_configuration)
if cosmos_db_configuration is not None:
pulumi.set(__self__, "cosmos_db_configuration", cosmos_db_configuration)
if export_configuration is not None:
pulumi.set(__self__, "export_configuration", export_configuration)
if private_endpoint_connections is not None:
pulumi.set(__self__, "private_endpoint_connections", private_endpoint_connections)
if public_network_access is not None:
pulumi.set(__self__, "public_network_access", public_network_access)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The provisioning state.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="accessPolicies")
def access_policies(self) -> Optional[Sequence['outputs.ServiceAccessPolicyEntryResponse']]:
"""
The access policies of the service instance.
"""
return pulumi.get(self, "access_policies")
@property
@pulumi.getter(name="authenticationConfiguration")
def authentication_configuration(self) -> Optional['outputs.ServiceAuthenticationConfigurationInfoResponse']:
"""
The authentication configuration for the service instance.
"""
return pulumi.get(self, "authentication_configuration")
@property
@pulumi.getter(name="corsConfiguration")
def cors_configuration(self) -> Optional['outputs.ServiceCorsConfigurationInfoResponse']:
"""
The settings for the CORS configuration of the service instance.
"""
return pulumi.get(self, "cors_configuration")
@property
@pulumi.getter(name="cosmosDbConfiguration")
def cosmos_db_configuration(self) -> Optional['outputs.ServiceCosmosDbConfigurationInfoResponse']:
"""
The settings for the Cosmos DB database backing the service.
"""
return pulumi.get(self, "cosmos_db_configuration")
@property
@pulumi.getter(name="exportConfiguration")
def export_configuration(self) -> Optional['outputs.ServiceExportConfigurationInfoResponse']:
"""
The settings for the export operation of the service instance.
"""
return pulumi.get(self, "export_configuration")
@property
@pulumi.getter(name="privateEndpointConnections")
def private_endpoint_connections(self) -> Optional[Sequence['outputs.PrivateEndpointConnectionResponse']]:
"""
The list of private endpoint connections that are set up for this resource.
"""
return pulumi.get(self, "private_endpoint_connections")
@property
@pulumi.getter(name="publicNetworkAccess")
def public_network_access(self) -> Optional[str]:
"""
Control permission for data plane traffic coming from public networks while private endpoint is enabled.
"""
return pulumi.get(self, "public_network_access")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ServicesResourceResponseIdentity(dict):
"""
Setting indicating whether the service has a managed identity associated with it.
"""
def __init__(__self__, *,
principal_id: str,
tenant_id: str,
type: Optional[str] = None):
"""
Setting indicating whether the service has a managed identity associated with it.
:param str principal_id: The principal ID of the resource identity.
:param str tenant_id: The tenant ID of the resource.
:param str type: Type of identity being specified, currently SystemAssigned and None are allowed.
"""
pulumi.set(__self__, "principal_id", principal_id)
pulumi.set(__self__, "tenant_id", tenant_id)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="principalId")
def principal_id(self) -> str:
"""
The principal ID of the resource identity.
"""
return pulumi.get(self, "principal_id")
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> str:
"""
The tenant ID of the resource.
"""
return pulumi.get(self, "tenant_id")
@property
@pulumi.getter
def type(self) -> Optional[str]:
"""
Type of identity being specified, currently SystemAssigned and None are allowed.
"""
return pulumi.get(self, "type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop | pulumi.set(__self__, "max_age", max_age)
if methods is not None: |
main.go | package main
import (
"fmt"
"os"
"time"
"github.com/pkg/browser"
log "github.com/sirupsen/logrus"
"gopkg.in/alecthomas/kingpin.v2"
)
// Command-line flags and (hidden) debug commands.
var (
	// global flags
verboseFlag = kingpin.Flag("verbose", "Verbose Output").Short('v').Default("false").Bool()
// hidden global flags
useDefaultAPIKey = kingpin.Flag("use-default-api-key", "Use Default Whisk Api Key").Hidden().Default("false").Bool()
skipDockerVersion = kingpin.Flag("skip-docker-version", "Skip check of docker version").Hidden().Default("false").Bool()
skipPullImages = kingpin.Flag("skip-pull-images", "skip pull images").Hidden().Default("false").Bool()
skipOpenBrowser = kingpin.Flag("skip-open-browser", "skip opening browser").Hidden().Default("false").Bool()
skipIde = kingpin.Flag("skip-ide", "skip starting ide").Hidden().Default("false").Bool()
// hidden debug commands
debugCmd = kingpin.Command("debug", "debug")
wskProps = debugCmd.Command("wskprops", "Create WskProps file")
ideDeployCmd = debugCmd.Command("ide-deploy", "Create IDE deployment")
ideDestroyCmd = debugCmd.Command("ide-destroy", "Destroy IDE deployment")
whiskDeployCmd = debugCmd.Command("whisk-deploy", "Create Whisk deployment")
whiskDestroyCmd = debugCmd.Command("whisk-destroy", "Destroy Whisk deployment")
redisDeployCmd = debugCmd.Command("redis-deploy", "Create Redis deployment")
redisDestroyCmd = debugCmd.Command("redis-destroy", "Destroy Redis deployment")
inputCmd = debugCmd.Command("input", "Input test")
inputArgCmd = inputCmd.Arg("input arg", "input arg").Default("").String()
inputSelectFlag = inputCmd.Flag("select", "select").Bool()
// start, stop, init and status
startCmd = kingpin.Command("start", "Start Development Enviroment")
// init
initCmd = kingpin.Command("init", "Initialise SDK Repository and related informations")
initDirArg = initCmd.Arg("directory", "work directory").Default("").String()
initRepoArg = initCmd.Arg("repo", "Repository").Default("").String()
initWhiskKeyFlag = initCmd.Flag("whisk-apikey", "Whisk API Key").Default("").String()
initIOKeyFlag = initCmd.Flag("io-apikey", "IO API Key").Default("").String()
initWskPropsFlag = initCmd.Flag("wskprops", "Write .wskprops").Default("false").Bool()
// stop
stopCmd = kingpin.Command("stop", "Stop Development Environment")
// restart
restartCmd = kingpin.Command("restart", "Restart Development Environment")
// status
statusCmd = kingpin.Command("status", "Check Containers Status")
)
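// Illustrative CLI usage (binary name and repository URL are placeholders):
//
//	iosdk init . https://github.com/example/sdk --wskprops
//	iosdk start
//	iosdk status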
func parseDebug(cmd string) bool |
func parse(cmd string) {
// debugging (hidden) commands
if parseDebug(cmd) {
return
}
// user visible commands
switch cmd {
// Start
case startCmd.FullCommand():
info, err := Start()
ShowError(err)
if err == nil {
PropagateConfig(info)
if !*skipOpenBrowser {
time.Sleep(2 * time.Second)
browser.OpenURL(BrowserURL)
}
}
// Stop
case stopCmd.FullCommand():
Stop()
case restartCmd.FullCommand():
Stop()
info, err := Start()
ShowError(err)
if err == nil {
PropagateConfig(info)
}
fmt.Println("\nRestarted, please reload the browser.")
// Init
case initCmd.FullCommand():
dir, err := Init(*initDirArg, *initRepoArg, os.Stderr)
if err == nil {
err = Configure(dir)
}
ShowError(err)
// Status
case statusCmd.FullCommand():
dockerStatus("iogw-openwhisk")
dockerStatus("iogw-redis")
dockerStatus("iogw-theia")
}
}
// Main entrypoint for wskide
func Main(version string) {
kingpin.UsageTemplate(kingpin.CompactUsageTemplate).Version(version).Author(Author)
kingpin.HelpFlag.Short('h')
kingpin.CommandLine.Help = Description
cmd := kingpin.Parse()
if *verboseFlag {
log.SetLevel(log.TraceLevel)
}
parse(cmd)
}
| {
switch cmd {
case wskProps.FullCommand():
ConfigLoad()
WskPropsSave()
case ideDeployCmd.FullCommand():
FatalIf(ConfigLoad())
info, _ := Preflight(Config.AppDir)
IdeDeploy(Config.AppDir, info)
configureIde(info)
case ideDestroyCmd.FullCommand():
IdeDestroy()
case whiskDeployCmd.FullCommand():
FatalIf(ConfigLoad())
WhiskDeploy()
WhiskUpdatePackageParameters("iosdk", ConfigMap())
case whiskDestroyCmd.FullCommand():
WhiskDestroy()
case redisDeployCmd.FullCommand():
RedisDeploy()
case redisDestroyCmd.FullCommand():
RedisDestroy()
case inputCmd.FullCommand():
if !*inputSelectFlag {
fmt.Printf("result: '%s'\n", Input("Input Test", *inputArgCmd))
} else {
fmt.Printf("select: '%s'\n", Select("Select Test", *inputArgCmd))
}
default:
return false
}
return true
} |
loader_2.py | import os
import re
import codecs
from utils import create_dico, create_mapping, zero_digits
from utils import iob2, iob_iobes
def load_sentences(path, lower, zeros):
"""
Load sentences. A line must contain at least a word and its tag.
Sentences are separated by empty lines.
"""
sentences = []
sentence = []
for line in codecs.open(path, 'r', 'utf8'):
line = zero_digits(line.rstrip()) if zeros else line.rstrip()
if not line:
if len(sentence) > 0:
if 'DOCSTART' not in sentence[0][0]:
sentences.append(sentence)
sentence = []
else:
word = line.split()
assert len(word) >= 2
sentence.append(word)
if len(sentence) > 0:
if 'DOCSTART' not in sentence[0][0]:
sentences.append(sentence)
return sentences
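# Illustrative input for load_sentences (CoNLL-style; the last column is the
# tag, sentences separated by blank lines):
#
#   EU NNP B-ORG
#   rejects VBZ O
#   German JJ B-MISC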
def update_tag_scheme(sentences, tag_scheme):
"""
    Check and update the sentences' tagging scheme to IOB2.
Only IOB1 and IOB2 schemes are accepted.
"""
for i, s in enumerate(sentences):
tags = [w[-1] for w in s]
# Check that tags are given in the IOB format
if not iob2(tags):
s_str = '\n'.join(' '.join(w) for w in s)
raise Exception('Sentences should be given in IOB format! ' +
'Please check sentence %i:\n%s' % (i, s_str))
if tag_scheme == 'iob':
# If format was IOB1, we convert to IOB2
for word, new_tag in zip(s, tags):
word[-1] = new_tag
elif tag_scheme == 'iobes':
new_tags = iob_iobes(tags)
for word, new_tag in zip(s, new_tags):
word[-1] = new_tag
else:
raise Exception('Unknown tagging scheme!')
def word_mapping(sentences, lower):
"""
Create a dictionary and a mapping of words, sorted by frequency.
"""
words = [[x[0].lower() if lower else x[0] for x in s] for s in sentences]
dico = create_dico(words)
dico['<UNK>'] = 10000000
word_to_id, id_to_word = create_mapping(dico)
print "Found %i unique words (%i in total)" % (
len(dico), sum(len(x) for x in words)
)
return dico, word_to_id, id_to_word
def char_mapping(sentences):
"""
Create a dictionary and mapping of characters, sorted by frequency.
"""
chars = ["".join([w[0] for w in s]) for s in sentences]
dico = create_dico(chars)
char_to_id, id_to_char = create_mapping(dico)
print "Found %i unique characters" % len(dico)
return dico, char_to_id, id_to_char
def tag_mapping(sentences):
"""
Create a dictionary and a mapping of tags, sorted by frequency.
"""
tags = [[word[-1] for word in s] for s in sentences]
dico = create_dico(tags)
tag_to_id, id_to_tag = create_mapping(dico)
print "Found %i unique named entity tags" % len(dico)
return dico, tag_to_id, id_to_tag
def cap_feature(s):
"""
Capitalization feature:
0 = low caps
1 = all caps
2 = first letter caps
3 = one capital (not first letter)
"""
if s.lower() == s:
return 0
elif s.upper() == s:
return 1
elif s[0].upper() == s[0]:
return 2
else:
return 3
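# Illustrative: cap_feature('hello') == 0, cap_feature('HELLO') == 1,
# cap_feature('Hello') == 2, cap_feature('heLlo') == 3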
def prepare_dataset(sentences, word_to_id, char_to_id, tag_to_id, lower=False):
"""
Prepare the dataset. Return a list of lists of dictionaries containing:
- word indexes
- word char indexes
- tag indexes
"""
def f(x): return x.lower() if lower else x
data = []
for s in sentences:
str_words = [w[0] for w in s]
words = [word_to_id[f(w) if f(w) in word_to_id else '<UNK>']
for w in str_words]
# Skip characters that are not in the training set
chars = [[char_to_id[c] for c in w if c in char_to_id]
for w in str_words]
caps = [cap_feature(w) for w in str_words]
tags = [tag_to_id[w[-1]] for w in s]
data.append({
'str_words': str_words,
'words': words,
'chars': chars,
'caps': caps,
'tags': tags,
})
return data
def augment_with_pretrained(dictionary, ext_emb_path, words):
"""
Augment the dictionary with words that have a pretrained embedding.
If `words` is None, we add every word that has a pretrained embedding
to the dictionary, otherwise, we only add the words that are given by
`words` (typically the words in the development and test sets.)
"""
print 'Loading pretrained embeddings from %s...' % ext_emb_path
assert os.path.isfile(ext_emb_path)
# Load pretrained embeddings from file
pretrained = set([
line.rstrip().split()[0].strip()
for line in codecs.open(ext_emb_path, 'r', 'utf-8')
        if len(line.rstrip()) > 0  # skip blank lines
])
# We either add every word in the pretrained file,
# or only words given in the `words` list to which
# we can assign a pretrained embedding
if words is None:
for word in pretrained: | for word in words:
if any(x in pretrained for x in [
word,
word.lower(),
re.sub('\d', '0', word.lower())
]) and word not in dictionary:
dictionary[word] = 0
word_to_id, id_to_word = create_mapping(dictionary)
return dictionary, word_to_id, id_to_word | if word not in dictionary:
dictionary[word] = 0
else: |
conftest.py | import logging
import os
import pytest
from rasa_nlu import data_router, config
from rasa_nlu.components import ComponentBuilder
from rasa_nlu.model import Trainer
from rasa_nlu.utils import zip_folder
from rasa_nlu import training_data
logging.basicConfig(level="DEBUG")
CONFIG_DEFAULTS_PATH = "sample_configs/config_defaults.yml"
DEFAULT_DATA_PATH = "data/examples/rasa/demo-rasa.json"
TEST_MODEL_PATH = "test_models/test_model_pretrained_embeddings"
# see `rasa_nlu.data_router` for details. avoids deadlock in
# `deferred_from_future` function during tests
data_router.DEFERRED_RUN_IN_REACTOR_THREAD = False
@pytest.fixture(scope="session")
def component_builder():
return ComponentBuilder()
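# Illustrative consumption of the fixture above (test name is an assumption):
#
#   def test_builder_is_shared(component_builder):
#       assert component_builder is not None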
@pytest.fixture(scope="session")
def spacy_nlp(component_builder, default_config):
spacy_nlp_config = {'name': 'SpacyNLP'}
return component_builder.create_component(spacy_nlp_config,
default_config).nlp
@pytest.fixture(scope="session")
def ner_crf_pos_feature_config():
return {
"features": [
["low", "title", "upper", "pos", "pos2"],
["bias", "low", "suffix3", "suffix2", "upper",
"title", "digit", "pos", "pos2", "pattern"],
["low", "title", "upper", "pos", "pos2"]]
}
@pytest.fixture(scope="session")
def mitie_feature_extractor(component_builder, default_config):
mitie_nlp_config = {'name': 'MitieNLP'}
return component_builder.create_component(mitie_nlp_config,
default_config).extractor
@pytest.fixture(scope="session")
def default_config():
return config.load(CONFIG_DEFAULTS_PATH)
@pytest.fixture(scope="session") | cfg = config.load(spacy_config_path)
trainer = Trainer(cfg)
td = training_data.load_data(DEFAULT_DATA_PATH)
trainer.train(td)
trainer.persist("test_models",
project_name="test_model_pretrained_embeddings")
model_dir_list = os.listdir(TEST_MODEL_PATH)
# directory name of latest model
model_dir = sorted(model_dir_list)[-1]
# path of that directory
model_path = os.path.join(TEST_MODEL_PATH, model_dir)
zip_path = zip_folder(model_path)
return zip_path | def zipped_nlu_model():
spacy_config_path = "sample_configs/config_pretrained_embeddings_spacy.yml"
|
tests.py | """
Regression tests for Model inheritance behavior.
"""
from __future__ import unicode_literals
import datetime
from operator import attrgetter
from unittest import expectedFailure
from django import forms
from django.test import TestCase
from .models import (
ArticleWithAuthor, BachelorParty, BirthdayParty, BusStation, Child,
DerivedM, InternalCertificationAudit, ItalianRestaurant, M2MChild,
MessyBachelorParty, ParkingLot, ParkingLot2, ParkingLot3, ParkingLot4A,
ParkingLot4B, Person, Place, Profile, QualityControl, Restaurant,
SelfRefChild, SelfRefParent, Senator, Supplier, TrainStation, User,
Wholesaler,
)
class ModelInheritanceTest(TestCase):
def test_model_inheritance(self):
# Regression for #7350, #7202
# Check that when you create a Parent object with a specific reference
# to an existent child instance, saving the Parent doesn't duplicate
# the child. This behavior is only activated during a raw save - it
# is mostly relevant to deserialization, but any sort of CORBA style
# 'narrow()' API would require a similar approach.
# Create a child-parent-grandparent chain
place1 = Place(
name="Guido's House of Pasta",
address='944 W. Fullerton')
place1.save_base(raw=True)
restaurant = Restaurant(
place_ptr=place1,
serves_hot_dogs=True,
serves_pizza=False)
restaurant.save_base(raw=True)
italian_restaurant = ItalianRestaurant(
restaurant_ptr=restaurant,
serves_gnocchi=True)
italian_restaurant.save_base(raw=True)
# Create a child-parent chain with an explicit parent link
place2 = Place(name='Main St', address='111 Main St')
place2.save_base(raw=True)
park = ParkingLot(parent=place2, capacity=100)
park.save_base(raw=True)
# Check that no extra parent objects have been created.
places = list(Place.objects.all())
self.assertEqual(places, [place1, place2])
dicts = list(Restaurant.objects.values('name', 'serves_hot_dogs'))
self.assertEqual(dicts, [{
'name': "Guido's House of Pasta",
'serves_hot_dogs': True
}])
dicts = list(ItalianRestaurant.objects.values(
'name', 'serves_hot_dogs', 'serves_gnocchi'))
self.assertEqual(dicts, [{
'name': "Guido's House of Pasta",
'serves_gnocchi': True,
'serves_hot_dogs': True,
}])
dicts = list(ParkingLot.objects.values('name', 'capacity'))
self.assertEqual(dicts, [{
'capacity': 100,
'name': 'Main St',
}])
# You can also update objects when using a raw save.
place1.name = "Guido's All New House of Pasta"
place1.save_base(raw=True)
restaurant.serves_hot_dogs = False
restaurant.save_base(raw=True)
italian_restaurant.serves_gnocchi = False
italian_restaurant.save_base(raw=True)
place2.name = 'Derelict lot'
place2.save_base(raw=True)
park.capacity = 50
park.save_base(raw=True)
# No extra parent objects after an update, either.
places = list(Place.objects.all())
self.assertEqual(places, [place2, place1])
self.assertEqual(places[0].name, 'Derelict lot')
self.assertEqual(places[1].name, "Guido's All New House of Pasta")
dicts = list(Restaurant.objects.values('name', 'serves_hot_dogs'))
self.assertEqual(dicts, [{
'name': "Guido's All New House of Pasta",
'serves_hot_dogs': False,
}])
dicts = list(ItalianRestaurant.objects.values(
'name', 'serves_hot_dogs', 'serves_gnocchi'))
self.assertEqual(dicts, [{
'name': "Guido's All New House of Pasta",
'serves_gnocchi': False,
'serves_hot_dogs': False,
}])
dicts = list(ParkingLot.objects.values('name', 'capacity'))
self.assertEqual(dicts, [{
'capacity': 50,
'name': 'Derelict lot',
}])
# If you try to raw_save a parent attribute onto a child object,
# the attribute will be ignored.
italian_restaurant.name = "Lorenzo's Pasta Hut"
italian_restaurant.save_base(raw=True)
# Note that the name has not changed
# - name is an attribute of Place, not ItalianRestaurant
dicts = list(ItalianRestaurant.objects.values(
'name', 'serves_hot_dogs', 'serves_gnocchi'))
self.assertEqual(dicts, [{
'name': "Guido's All New House of Pasta",
'serves_gnocchi': False,
'serves_hot_dogs': False,
}])
def test_issue_7105(self):
# Regression tests for #7105: dates() queries should be able to use
# fields from the parent model as easily as the child.
Child.objects.create(
name='child',
created=datetime.datetime(2008, 6, 26, 17, 0, 0))
datetimes = list(Child.objects.datetimes('created', 'month'))
self.assertEqual(datetimes, [datetime.datetime(2008, 6, 1, 0, 0)])
def test_issue_7276(self):
# Regression test for #7276: calling delete() on a model with
# multi-table inheritance should delete the associated rows from any
# ancestor tables, as well as any descendant objects.
place1 = Place(
name="Guido's House of Pasta",
address='944 W. Fullerton')
place1.save_base(raw=True)
restaurant = Restaurant(
place_ptr=place1,
serves_hot_dogs=True,
serves_pizza=False)
restaurant.save_base(raw=True)
italian_restaurant = ItalianRestaurant(
restaurant_ptr=restaurant,
serves_gnocchi=True)
italian_restaurant.save_base(raw=True)
ident = ItalianRestaurant.objects.all()[0].id
self.assertEqual(Place.objects.get(pk=ident), place1)
Restaurant.objects.create(
name='a',
address='xx',
serves_hot_dogs=True,
serves_pizza=False)
# This should delete both Restaurants, plus the related places, plus
# the ItalianRestaurant.
Restaurant.objects.all().delete()
with self.assertRaises(Place.DoesNotExist):
Place.objects.get(pk=ident)
with self.assertRaises(ItalianRestaurant.DoesNotExist):
ItalianRestaurant.objects.get(pk=ident)
def test_issue_6755(self):
"""
Regression test for #6755
"""
r = Restaurant(serves_pizza=False, serves_hot_dogs=False)
r.save()
self.assertEqual(r.id, r.place_ptr_id)
orig_id = r.id
r = Restaurant(place_ptr_id=orig_id, serves_pizza=True, serves_hot_dogs=False)
r.save()
self.assertEqual(r.id, orig_id)
self.assertEqual(r.id, r.place_ptr_id)
def test_issue_7488(self):
# Regression test for #7488. This looks a little crazy, but it's the
# equivalent of what the admin interface has to do for the edit-inline
# case.
suppliers = Supplier.objects.filter(
restaurant=Restaurant(name='xx', address='yy'))
suppliers = list(suppliers)
self.assertEqual(suppliers, [])
def test_issue_11764(self):
"""
Regression test for #11764
"""
wholesalers = list(Wholesaler.objects.all().select_related())
self.assertEqual(wholesalers, [])
def test_issue_7853(self):
"""
Regression test for #7853
If the parent class has a self-referential link, make sure that any
updates to that link via the child update the right table.
"""
obj = SelfRefChild.objects.create(child_data=37, parent_data=42)
obj.delete()
def test_get_next_previous_by_date(self):
"""
Regression tests for #8076
get_(next/previous)_by_date should work
"""
c1 = ArticleWithAuthor(
headline='ArticleWithAuthor 1',
author="Person 1",
pub_date=datetime.datetime(2005, 8, 1, 3, 0))
c1.save()
c2 = ArticleWithAuthor(
headline='ArticleWithAuthor 2',
author="Person 2",
pub_date=datetime.datetime(2005, 8, 1, 10, 0))
c2.save()
c3 = ArticleWithAuthor(
headline='ArticleWithAuthor 3',
author="Person 3",
pub_date=datetime.datetime(2005, 8, 2))
c3.save()
self.assertEqual(c1.get_next_by_pub_date(), c2)
self.assertEqual(c2.get_next_by_pub_date(), c3)
with self.assertRaises(ArticleWithAuthor.DoesNotExist):
c3.get_next_by_pub_date()
self.assertEqual(c3.get_previous_by_pub_date(), c2)
self.assertEqual(c2.get_previous_by_pub_date(), c1)
with self.assertRaises(ArticleWithAuthor.DoesNotExist):
c1.get_previous_by_pub_date()
def test_inherited_fields(self):
"""
Regression test for #8825 and #9390
Make sure all inherited fields (esp. m2m fields, in this case) appear
on the child class.
"""
m2mchildren = list(M2MChild.objects.filter(articles__isnull=False))
self.assertEqual(m2mchildren, [])
# Ordering should not include any database column more than once (this
# is most likely to occur naturally with model inheritance, so we
# check it here). Regression test for #9390. This necessarily pokes at
# the SQL string for the query, since the duplicate problems are only
# apparent at that late stage.
qs = ArticleWithAuthor.objects.order_by('pub_date', 'pk')
sql = qs.query.get_compiler(qs.db).as_sql()[0]
fragment = sql[sql.find('ORDER BY'):]
pos = fragment.find('pub_date')
self.assertEqual(fragment.find('pub_date', pos + 1), -1)
def test_queryset_update_on_parent_model(self):
"""
Regression test for #10362
It is possible to call update() and only change a field in
an ancestor model.
"""
article = ArticleWithAuthor.objects.create(
author="fred",
headline="Hey there!",
pub_date=datetime.datetime(2009, 3, 1, 8, 0, 0))
update = ArticleWithAuthor.objects.filter(
author="fred").update(headline="Oh, no!")
self.assertEqual(update, 1)
update = ArticleWithAuthor.objects.filter(
pk=article.pk).update(headline="Oh, no!")
self.assertEqual(update, 1)
derivedm1 = DerivedM.objects.create(
customPK=44,
base_name="b1",
derived_name="d1")
self.assertEqual(derivedm1.customPK, 44)
self.assertEqual(derivedm1.base_name, 'b1')
self.assertEqual(derivedm1.derived_name, 'd1')
derivedms = list(DerivedM.objects.all())
self.assertEqual(derivedms, [derivedm1])
def test_use_explicit_o2o_to_parent_as_pk(self):
"""
Regression tests for #10406
If there's a one-to-one link between a child model and the parent and
no explicit pk declared, we can use the one-to-one link as the pk on
the child.
"""
self.assertEqual(ParkingLot2._meta.pk.name, "parent")
# However, the connector from child to parent need not be the pk on
# the child at all.
self.assertEqual(ParkingLot3._meta.pk.name, "primary_key")
# the child->parent link
self.assertEqual(
ParkingLot3._meta.get_ancestor_link(Place).name,
"parent")
def test_use_explicit_o2o_to_parent_from_abstract_model(self):
self.assertEqual(ParkingLot4A._meta.pk.name, "parent")
ParkingLot4A.objects.create(
name="Parking4A",
address='21 Jump Street',
)
self.assertEqual(ParkingLot4B._meta.pk.name, "parent")
ParkingLot4B.objects.create(
name="Parking4B",
address='21 Jump Street',
)
def test_all_fields_from_abstract_base_class(self):
"""
Regression tests for #7588
"""
# All fields from an ABC, including those inherited non-abstractly
# should be available on child classes (#7588). Creating this instance
# should work without error.
QualityControl.objects.create(
headline="Problems in Django",
pub_date=datetime.datetime.now(),
quality=10,
assignee="adrian")
def test_abstract_base_class_m2m_relation_inheritance(self):
# Check that many-to-many relations defined on an abstract base class
# are correctly inherited (and created) on the child class.
p1 = Person.objects.create(name='Alice')
p2 = Person.objects.create(name='Bob')
p3 = Person.objects.create(name='Carol')
p4 = Person.objects.create(name='Dave')
birthday = BirthdayParty.objects.create(
name='Birthday party for Alice')
birthday.attendees.set([p1, p3])
bachelor = BachelorParty.objects.create(name='Bachelor party for Bob')
bachelor.attendees.set([p2, p4])
parties = list(p1.birthdayparty_set.all())
self.assertEqual(parties, [birthday])
parties = list(p1.bachelorparty_set.all())
self.assertEqual(parties, [])
parties = list(p2.bachelorparty_set.all())
self.assertEqual(parties, [bachelor])
# Check that a subclass of a subclass of an abstract model doesn't get
# its own accessor.
self.assertFalse(hasattr(p2, 'messybachelorparty_set'))
# ... but it does inherit the m2m from its parent
messy = MessyBachelorParty.objects.create(
name='Bachelor party for Dave')
messy.attendees.set([p4])
messy_parent = messy.bachelorparty_ptr
parties = list(p4.bachelorparty_set.all())
self.assertEqual(parties, [bachelor, messy_parent])
def test_abstract_verbose_name_plural_inheritance(self):
|
def test_inherited_nullable_exclude(self):
obj = SelfRefChild.objects.create(child_data=37, parent_data=42)
self.assertQuerysetEqual(
SelfRefParent.objects.exclude(self_data=72), [
obj.pk
],
attrgetter("pk")
)
self.assertQuerysetEqual(
SelfRefChild.objects.exclude(self_data=72), [
obj.pk
],
attrgetter("pk")
)
def test_concrete_abstract_concrete_pk(self):
"""
Primary key set correctly with concrete->abstract->concrete inheritance.
"""
# Regression test for #13987: Primary key is incorrectly determined
# when more than one model has a concrete->abstract->concrete
# inheritance hierarchy.
self.assertEqual(
len([field for field in BusStation._meta.local_fields if field.primary_key]),
1
)
self.assertEqual(
len([field for field in TrainStation._meta.local_fields if field.primary_key]),
1
)
self.assertIs(BusStation._meta.pk.model, BusStation)
self.assertIs(TrainStation._meta.pk.model, TrainStation)
def test_inherited_unique_field_with_form(self):
"""
Test that a model which has different primary key for the parent model
passes unique field checking correctly. Refs #17615.
"""
class ProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = '__all__'
User.objects.create(username="user_only")
p = Profile.objects.create(username="user_with_profile")
form = ProfileForm({'username': "user_with_profile", 'extra': "hello"},
instance=p)
self.assertTrue(form.is_valid())
def test_inheritance_joins(self):
# Test for #17502 - check that filtering through two levels of
# inheritance chain doesn't generate extra joins.
qs = ItalianRestaurant.objects.all()
self.assertEqual(str(qs.query).count('JOIN'), 2)
qs = ItalianRestaurant.objects.filter(name='foo')
self.assertEqual(str(qs.query).count('JOIN'), 2)
@expectedFailure
def test_inheritance_values_joins(self):
# It would be nice (but not too important) to skip the middle join in
# this case. Skipping is possible as nothing from the middle model is
# used in the qs and top contains direct pointer to the bottom model.
qs = ItalianRestaurant.objects.values_list('serves_gnocchi').filter(name='foo')
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_issue_21554(self):
senator = Senator.objects.create(
name='John Doe', title='X', state='Y'
)
senator = Senator.objects.get(pk=senator.pk)
self.assertEqual(senator.name, 'John Doe')
self.assertEqual(senator.title, 'X')
self.assertEqual(senator.state, 'Y')
def test_inheritance_resolve_columns(self):
Restaurant.objects.create(name='Bobs Cafe', address="Somewhere",
serves_pizza=True, serves_hot_dogs=True)
p = Place.objects.all().select_related('restaurant')[0]
self.assertIsInstance(p.restaurant.serves_pizza, bool)
def test_inheritance_select_related(self):
# Regression test for #7246
r1 = Restaurant.objects.create(
name="Nobu", serves_hot_dogs=True, serves_pizza=False
)
r2 = Restaurant.objects.create(
name="Craft", serves_hot_dogs=False, serves_pizza=True
)
Supplier.objects.create(name="John", restaurant=r1)
Supplier.objects.create(name="Jane", restaurant=r2)
self.assertQuerysetEqual(
Supplier.objects.order_by("name").select_related(), [
"Jane",
"John",
],
attrgetter("name")
)
jane = Supplier.objects.order_by("name").select_related("restaurant")[0]
self.assertEqual(jane.restaurant.name, "Craft")
def test_related_filtering_query_efficiency_ticket_15844(self):
r = Restaurant.objects.create(
name="Guido's House of Pasta",
address='944 W. Fullerton',
serves_hot_dogs=True,
serves_pizza=False,
)
s = Supplier.objects.create(restaurant=r)
with self.assertNumQueries(1):
self.assertQuerysetEqual(
Supplier.objects.filter(restaurant=r),
[s], lambda x: x,
)
with self.assertNumQueries(1):
self.assertQuerysetEqual(
r.supplier_set.all(),
[s], lambda x: x,
)
| """
verbose_name_plural correctly inherited from ABC if inheritance chain
includes an abstract model.
"""
# Regression test for #11369: verbose_name_plural should be inherited
# from an ABC even when there are one or more intermediate
# abstract models in the inheritance chain, for consistency with
# verbose_name.
self.assertEqual(
InternalCertificationAudit._meta.verbose_name_plural,
'Audits'
) |
models.py | from datetime import date, timedelta
from django.db import models
class Plant(models.Model): | name = models.CharField(max_length=100)
image_url = models.URLField(max_length=200)
description = models.TextField()
def __str__(self):
return self.name
class WateringLog(models.Model):
plant = models.ForeignKey(
Plant, on_delete=models.CASCADE
)
water_date = models.DateField(auto_now_add=True)
next_suggested_date = models.DateField()
def __str__(self):
return f'{self.plant.name} may need water on {self.next_suggested_date}'
def save(self, *args, **kwargs):
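        # Note: automatic scheduling is currently disabled (see the
        # commented-out line below), so callers must set next_suggested_date
        # explicitly before saving.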
# self.next_suggested_date = date.today() + timedelta(7)
super().save(*args, **kwargs) | |
ExecutorGroup.js | /**
* @module ol/render/canvas/ExecutorGroup
*/
import BuilderType from './BuilderType.js';
import Executor from './Executor.js';
import {buffer, createEmpty, extendCoordinate} from '../../extent.js';
import {
compose as composeTransform,
create as createTransform,
} from '../../transform.js';
import {createCanvasContext2D} from '../../dom.js';
import {isEmpty} from '../../obj.js';
import {numberSafeCompareFunction} from '../../array.js';
import {transform2D} from '../../geom/flat/transform.js';
/**
* @const
* @type {Array<import("./BuilderType.js").default>}
*/
const ORDER = [
BuilderType.POLYGON,
BuilderType.CIRCLE,
BuilderType.LINE_STRING,
BuilderType.IMAGE,
BuilderType.TEXT,
BuilderType.DEFAULT,
];
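// Within each z-index, executors run in this order, so e.g. polygon fills at a
// given z-index are drawn before (and therefore beneath) text at the same z-index.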
class ExecutorGroup {
/**
* @param {import("../../extent.js").Extent} maxExtent Max extent for clipping. When a
   * `maxExtent` was set on the Builder for this executor group, the same `maxExtent`
   * should be set here, unless the target context does not exceed that extent (which
* can be the case when rendering to tiles).
* @param {number} resolution Resolution.
* @param {number} pixelRatio Pixel ratio.
* @param {boolean} overlaps The executor group can have overlapping geometries.
* @param {!Object<string, !Object<import("./BuilderType.js").default, import("../canvas.js").SerializableInstructions>>} allInstructions
* The serializable instructions.
* @param {number=} opt_renderBuffer Optional rendering buffer.
*/
constructor(
maxExtent,
resolution,
pixelRatio,
overlaps,
allInstructions,
opt_renderBuffer
) {
/**
* @private
* @type {import("../../extent.js").Extent}
*/
this.maxExtent_ = maxExtent;
/**
* @private
* @type {boolean}
*/
this.overlaps_ = overlaps;
/**
* @private
* @type {number}
*/
this.pixelRatio_ = pixelRatio;
/**
* @private
* @type {number}
*/
this.resolution_ = resolution;
/**
* @private
* @type {number|undefined}
*/
this.renderBuffer_ = opt_renderBuffer;
/**
* @private
* @type {!Object<string, !Object<import("./BuilderType.js").default, import("./Executor").default>>}
*/
this.executorsByZIndex_ = {};
/**
* @private
* @type {CanvasRenderingContext2D}
*/
this.hitDetectionContext_ = null;
/**
* @private
* @type {import("../../transform.js").Transform}
*/
this.hitDetectionTransform_ = createTransform();
this.createExecutors_(allInstructions);
}
/**
* @param {CanvasRenderingContext2D} context Context.
* @param {import("../../transform.js").Transform} transform Transform.
*/
clip(context, transform) {
const flatClipCoords = this.getClipCoords(transform);
context.beginPath();
context.moveTo(flatClipCoords[0], flatClipCoords[1]);
context.lineTo(flatClipCoords[2], flatClipCoords[3]);
context.lineTo(flatClipCoords[4], flatClipCoords[5]);
context.lineTo(flatClipCoords[6], flatClipCoords[7]);
context.clip();
}
/**
* Create executors and populate them using the provided instructions.
* @private
* @param {!Object<string, !Object<import("./BuilderType.js").default, import("../canvas.js").SerializableInstructions>>} allInstructions The serializable instructions
*/
createExecutors_(allInstructions) {
for (const zIndex in allInstructions) {
let executors = this.executorsByZIndex_[zIndex];
if (executors === undefined) {
executors = {};
this.executorsByZIndex_[zIndex] = executors;
}
const instructionByZindex = allInstructions[zIndex];
const renderBuffer = [this.renderBuffer_ || 0, this.renderBuffer_ || 0];
for (const builderType in instructionByZindex) {
const instructions = instructionByZindex[builderType];
executors[builderType] = new Executor(
this.resolution_,
this.pixelRatio_,
this.overlaps_,
instructions,
renderBuffer
);
}
}
}
/**
* @param {Array<import("./BuilderType.js").default>} executors Executors.
* @return {boolean} Has executors of the provided types.
*/
hasExecutors(executors) {
for (const zIndex in this.executorsByZIndex_) {
const candidates = this.executorsByZIndex_[zIndex];
for (let i = 0, ii = executors.length; i < ii; ++i) {
if (executors[i] in candidates) {
return true;
}
}
}
return false;
}
/**
* @param {import("../../coordinate.js").Coordinate} coordinate Coordinate.
* @param {number} resolution Resolution.
* @param {number} rotation Rotation.
* @param {number} hitTolerance Hit tolerance in pixels.
* @param {function(import("../../Feature.js").FeatureLike): T} callback Feature callback.
* @param {Array<import("../../Feature.js").FeatureLike>} declutteredFeatures Decluttered features.
* @return {T|undefined} Callback result.
* @template T
*/
forEachFeatureAtCoordinate(
coordinate,
resolution,
rotation,
hitTolerance,
callback,
declutteredFeatures
) {
hitTolerance = Math.round(hitTolerance);
const contextSize = hitTolerance * 2 + 1;
const transform = composeTransform(
this.hitDetectionTransform_,
hitTolerance + 0.5,
hitTolerance + 0.5,
1 / resolution,
-1 / resolution,
-rotation,
-coordinate[0],
-coordinate[1]
);
if (!this.hitDetectionContext_) {
this.hitDetectionContext_ = createCanvasContext2D(
contextSize,
contextSize
);
}
const context = this.hitDetectionContext_;
if (
context.canvas.width !== contextSize ||
context.canvas.height !== contextSize
) {
context.canvas.width = contextSize;
context.canvas.height = contextSize;
} else {
context.clearRect(0, 0, contextSize, contextSize);
}
/**
* @type {import("../../extent.js").Extent}
*/
let hitExtent;
if (this.renderBuffer_ !== undefined) {
hitExtent = createEmpty();
extendCoordinate(hitExtent, coordinate);
buffer(
hitExtent,
resolution * (this.renderBuffer_ + hitTolerance),
hitExtent
);
}
const mask = getCircleArray(hitTolerance);
let builderType;
/**
* @param {import("../../Feature.js").FeatureLike} feature Feature.
* @return {?} Callback result.
*/
function | (feature) {
const imageData = context.getImageData(0, 0, contextSize, contextSize)
.data;
for (let i = 0; i < contextSize; i++) {
for (let j = 0; j < contextSize; j++) {
if (mask[i][j]) {
if (imageData[(j * contextSize + i) * 4 + 3] > 0) {
let result;
if (
!(
declutteredFeatures &&
(builderType == BuilderType.IMAGE ||
builderType == BuilderType.TEXT)
) ||
declutteredFeatures.indexOf(feature) !== -1
) {
result = callback(feature);
}
if (result) {
return result;
} else {
context.clearRect(0, 0, contextSize, contextSize);
return undefined;
}
}
}
}
}
}
/** @type {Array<number>} */
const zs = Object.keys(this.executorsByZIndex_).map(Number);
zs.sort(numberSafeCompareFunction);
let i, j, executors, executor, result;
for (i = zs.length - 1; i >= 0; --i) {
const zIndexKey = zs[i].toString();
executors = this.executorsByZIndex_[zIndexKey];
for (j = ORDER.length - 1; j >= 0; --j) {
builderType = ORDER[j];
executor = executors[builderType];
if (executor !== undefined) {
result = executor.executeHitDetection(
context,
transform,
rotation,
featureCallback,
hitExtent
);
if (result) {
return result;
}
}
}
}
return undefined;
}
/**
* @param {import("../../transform.js").Transform} transform Transform.
* @return {Array<number>} Clip coordinates.
*/
getClipCoords(transform) {
const maxExtent = this.maxExtent_;
if (!maxExtent) {
return null;
}
const minX = maxExtent[0];
const minY = maxExtent[1];
const maxX = maxExtent[2];
const maxY = maxExtent[3];
const flatClipCoords = [minX, minY, minX, maxY, maxX, maxY, maxX, minY];
transform2D(flatClipCoords, 0, 8, 2, transform, flatClipCoords);
return flatClipCoords;
}
/**
* @return {boolean} Is empty.
*/
isEmpty() {
return isEmpty(this.executorsByZIndex_);
}
/**
* @param {CanvasRenderingContext2D} context Context.
* @param {number} contextScale Scale of the context.
* @param {import("../../transform.js").Transform} transform Transform.
* @param {number} viewRotation View rotation.
   * @param {boolean} snapToPixel Snap point symbols and text to integer pixels.
* @param {Array<import("./BuilderType.js").default>=} opt_builderTypes Ordered replay types to replay.
* Default is {@link module:ol/render/replay~ORDER}
* @param {import("rbush").default=} opt_declutterTree Declutter tree.
*/
execute(
context,
contextScale,
transform,
viewRotation,
snapToPixel,
opt_builderTypes,
opt_declutterTree
) {
/** @type {Array<number>} */
const zs = Object.keys(this.executorsByZIndex_).map(Number);
zs.sort(numberSafeCompareFunction);
// setup clipping so that the parts of over-simplified geometries are not
// visible outside the current extent when panning
if (this.maxExtent_) {
context.save();
this.clip(context, transform);
}
const builderTypes = opt_builderTypes ? opt_builderTypes : ORDER;
let i, ii, j, jj, replays, replay;
if (opt_declutterTree) {
zs.reverse();
}
for (i = 0, ii = zs.length; i < ii; ++i) {
const zIndexKey = zs[i].toString();
replays = this.executorsByZIndex_[zIndexKey];
for (j = 0, jj = builderTypes.length; j < jj; ++j) {
const builderType = builderTypes[j];
replay = replays[builderType];
if (replay !== undefined) {
replay.execute(
context,
contextScale,
transform,
viewRotation,
snapToPixel,
opt_declutterTree
);
}
}
}
if (this.maxExtent_) {
context.restore();
}
}
}
/**
* This cache is used for storing calculated pixel circles for increasing performance.
* It is a static property to allow each Replaygroup to access it.
* @type {Object<number, Array<Array<(boolean|undefined)>>>}
*/
const circleArrayCache = {
0: [[true]],
};
/**
* This method fills a row in the array from the given coordinate to the
* middle with `true`.
* @param {Array<Array<(boolean|undefined)>>} array The array that will be altered.
* @param {number} x X coordinate.
* @param {number} y Y coordinate.
*/
function fillCircleArrayRowToMiddle(array, x, y) {
let i;
const radius = Math.floor(array.length / 2);
if (x >= radius) {
for (i = radius; i < x; i++) {
array[i][y] = true;
}
} else if (x < radius) {
for (i = x + 1; i < radius; i++) {
array[i][y] = true;
}
}
}
/**
 * This method creates a circle inside a fitting array. Points inside the
 * circle are marked `true`; points outside are left undefined.
* It uses the midpoint circle algorithm.
* A cache is used to increase performance.
* @param {number} radius Radius.
 * @return {Array<Array<(boolean|undefined)>>} An array with marked circle points.
*/
export function getCircleArray(radius) {
if (circleArrayCache[radius] !== undefined) {
return circleArrayCache[radius];
}
const arraySize = radius * 2 + 1;
const arr = new Array(arraySize);
for (let i = 0; i < arraySize; i++) {
arr[i] = new Array(arraySize);
}
let x = radius;
let y = 0;
let error = 0;
while (x >= y) {
fillCircleArrayRowToMiddle(arr, radius + x, radius + y);
fillCircleArrayRowToMiddle(arr, radius + y, radius + x);
fillCircleArrayRowToMiddle(arr, radius - y, radius + x);
fillCircleArrayRowToMiddle(arr, radius - x, radius + y);
fillCircleArrayRowToMiddle(arr, radius - x, radius - y);
fillCircleArrayRowToMiddle(arr, radius - y, radius - x);
fillCircleArrayRowToMiddle(arr, radius + y, radius - x);
fillCircleArrayRowToMiddle(arr, radius + x, radius - y);
y++;
error += 1 + 2 * y;
if (2 * (error - x) + 1 > 0) {
x -= 1;
error += 1 - 2 * x;
}
}
circleArrayCache[radius] = arr;
return arr;
}
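// Usage sketch (illustrative, not part of the library): the mask returned
// above gates the per-pixel alpha checks in forEachFeatureAtCoordinate, e.g.
//
//   const mask = getCircleArray(hitTolerance); // (2r + 1) x (2r + 1)
//   if (mask[i][j] && imageData[(j * contextSize + i) * 4 + 3] > 0) {
//     // pixel is inside the tolerance circle and something was drawn on it
//   }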
export default ExecutorGroup;
| featureCallback |
items.py | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
# class AddressItem(scrapy.Item):
# # define the fields for your item here like:
# # name = scrapy.Field()
# pass
class AddressItem(scrapy.Item):
# define the fields for your item here:
| school_id = scrapy.Field()
name = scrapy.Field()
street_address = scrapy.Field()
city = scrapy.Field()
state = scrapy.Field()
postcode = scrapy.Field()
ref = scrapy.Field()
website = scrapy.Field()
extras = scrapy.Field() |
|
views.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.http import Http404
from core.models import (Category, Article, Source, BaseUserProfile,
BookmarkArticle, ArticleLike, HashTag, Menu, Notification, Devices,
    SocialAccount, CategoryAssociation,
TrendingArticle, Domain, DailyDigest, DraftMedia, Comment,
Subscription)
from rest_framework.authtoken.models import Token
from rest_framework.views import APIView
from .serializers import (CategorySerializer, ArticleSerializer, UserSerializer,
SourceSerializer, LoginUserSerializer, BaseUserProfileSerializer,
BookmarkArticleSerializer, ArticleLikeSerializer, HashTagSerializer,
MenuSerializer, NotificationSerializer, TrendingArticleSerializer,
ArticleCreateUpdateSerializer, DraftMediaSerializer, CommentSerializer,
CommentListSerializer, SubsMediaSerializer, UserProfileSerializer)
from rest_framework.response import Response
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework import filters
from newscout_web.constants import SOCIAL_AUTH_PROVIDERS
from django.db.models import Q
from rest_framework.exceptions import APIException
from collections import OrderedDict
from rest_framework import generics, viewsets
from rest_framework.pagination import CursorPagination
from rest_framework.generics import ListAPIView
from rest_framework.parsers import JSONParser
from django.core.mail import EmailMultiAlternatives
from django.conf import settings
from datetime import datetime, timedelta
from django.db.models import Count, Max, Min
import pytz
import uuid
from core.utils import es, ingest_to_elastic, delete_from_elastic
from elasticsearch_dsl import Search
import math
from rest_framework.utils.urls import replace_query_param
from google.auth.transport import requests as grequests
from google.oauth2 import id_token
import facebook
from .exception_handler import (create_error_response, TokenIDMissing, ProviderMissing,
SocialAuthTokenException)
import logging
import operator
from functools import reduce
import tweepy
import json
from captcha.models import CaptchaStore
from captcha.helpers import captcha_image_url
log = logging.getLogger(__name__)
def create_response(response_data):
"""
method used to create response data in given format
"""
response = OrderedDict()
response["header"] = {"status": "1"}
response["body"] = response_data
return response
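# Illustrative: create_response({"Msg": "ok"}) returns
# OrderedDict([("header", {"status": "1"}), ("body", {"Msg": "ok"})])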
def create_serializer_error_response(errors):
"""
    method used to create error response for serializer errors
"""
error_list = []
for k, v in errors.items():
if isinstance(v, dict):
_, v = v.popitem()
d = {}
d["field"] = k
d["field_error"] = v[0]
error_list.append(d)
return OrderedDict({"header": {"status": "0"}, "errors": {
"errorList": error_list}})
class SignUpAPIView(APIView):
permission_classes = (AllowAny,)
def post(self, request, *args, **kwargs):
user_serializer = UserSerializer(data=request.data)
if user_serializer.is_valid():
user_serializer.save()
return Response(create_response({"Msg": "sign up successfully"}))
else:
return Response(
create_serializer_error_response(user_serializer.errors),
status=403)
class LoginFieldsRequired(APIException):
"""
api exception for no user found
"""
status_code = 401
default_detail = ("username and password are required")
default_code = "username_and_password"
class LoginAPIView(generics.GenericAPIView):
serializer_class = LoginUserSerializer
permission_classes = (AllowAny,)
def post(self, request, format=None):
serializer = LoginUserSerializer(data=request.data)
if not serializer.is_valid():
res_data = create_serializer_error_response(serializer.errors)
return Response(res_data, status=403)
user = BaseUserProfile.objects.filter(email=request.data["email"]).first()
device_name = request.data.get("device_name")
device_id = request.data.get("device_id")
if device_id and device_name:
device, _ = Devices.objects.get_or_create(user=user,
device_name=device_name,
device_id=device_id)
notification_obj, _ = Notification.objects.get_or_create(device=device)
notification = NotificationSerializer(notification_obj)
user_serializer = BaseUserProfileSerializer(user)
token, _ = Token.objects.get_or_create(user=user)
data = user_serializer.data
data["token"] = token.key
if device_id and device_name:
data["breaking_news"] = notification.data['breaking_news']
data["daily_edition"] = notification.data['daily_edition']
data["personalized"] = notification.data['personalized']
response_data = create_response({"user": data})
return Response(response_data)
class LogoutAPIView(APIView): | permission_classes = (IsAuthenticated,)
def get(self, request, format=None):
request.user.auth_token.delete()
return Response(create_response({"Msg": "User has been logged out"}))
class UserHashTagAPIView(APIView):
"""
Save new tags and remove older tags based on user selection
"""
permission_classes = (IsAuthenticated,)
parser_classes = (JSONParser,)
def post(self, request, format=None):
user = self.request.user
hash_tags = request.data["tags"]
user_tags = HashTag.objects.filter(name__in=hash_tags)
if user_tags:
user.passion.clear()
user.passion.add(*user_tags)
return Response(create_response({"Msg": "Successfully saved tags"}))
return Response(create_error_response({"Msg": "Invalid tags"}), status=400)
class CategoryListAPIView(APIView):
permission_classes = (AllowAny,)
def get(self, request, format=None, *args, **kwargs):
"""
List all news category
"""
categories = CategorySerializer(Category.objects.all(), many=True)
return Response(create_response({"categories": categories.data}))
def post(self, request, format=None):
"""
Save new category to database
"""
if request.user.is_authenticated:
serializer = CategorySerializer(data=request.data, many=True)
if serializer.is_valid():
serializer.save()
return Response(create_response(serializer.data))
return Response(create_error_response(serializer.errors), status=400)
raise Http404
def put(self, request, format=None):
"""
update category in database
"""
if request.user.is_authenticated:
_id = request.data.get("id")
category = Category.objects.get(id=_id)
serializer = CategorySerializer(category, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(create_response(serializer.data))
return Response(create_error_response(serializer.errors), status=400)
raise Http404
class SourceListAPIView(APIView):
permission_classes = (AllowAny,)
def get(self, request, format=None, *args, **kwargs):
"""
List all the sources
"""
source = SourceSerializer(Source.objects.all(), many=True)
return Response(create_response({"results": source.data}))
class NoarticleFound(APIException):
"""
api exception for no user found
"""
status_code = 404
default_detail = ("Article does not exist")
default_code = "no_article_found"
class PostpageNumberPagination(CursorPagination):
page_size = 10
page_size_query_param = 'page_size'
ordering = '-created_at'
class ArticleListAPIView(ListAPIView):
serializer_class = ArticleSerializer
permission_classes = (AllowAny,)
pagination_class = PostpageNumberPagination
filter_backends = (filters.OrderingFilter,)
ordering = ('-published_on',)
def get_queryset(self):
q = self.request.GET.get("q", "")
tag = self.request.GET.getlist("tag", "")
category = self.request.GET.getlist("category", "")
source = self.request.GET.getlist("source", "")
queryset = Article.objects.all()
        if not self.request.user.is_anonymous and self.request.user.domain:
queryset = queryset.filter(domain=self.request.user.domain)
else:
queryset = Article.objects.none()
if source:
queryset = queryset.filter(source__name__in=source)
if category:
queryset = queryset.filter(category__name__in=category)
if tag:
queryset = queryset.filter(hash_tags__name__in=tag)
if q:
q_list = q.split(" ")
condition_1 = reduce(operator.or_, [Q(title__icontains=s) for s in q_list])
condition_2 = reduce(operator.or_, [Q(full_text__icontains=s) for s in q_list])
queryset = queryset.filter(condition_1 | condition_2)
return queryset
def list(self, request, *args, **kwargs):
queryset = self.filter_queryset(self.get_queryset())
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(page, many=True)
if serializer.data:
paginated_response = self.get_paginated_response(serializer.data)
return Response(create_response(paginated_response.data))
else:
return Response(create_error_response({"Msg": "News Doesn't Exist"}), status=400)
class ArticleDetailAPIView(APIView):
permission_classes = (AllowAny,)
def get(self, request, format=None, *args, **kwargs):
slug = self.kwargs.get("slug", "")
user = self.request.user
article = Article.objects.filter(slug=slug).first()
has_subscribed = False
if not self.request.user.is_anonymous and \
Subscription.objects.filter(
                user=self.request.user).exclude(subs_type='Basic').exists():
has_subscribed = True
try:
next_article = Article.objects.filter(id__gt=article.id).order_by("id")[0:1].get().slug
except Exception as error:
print(error)
next_article = Article.objects.aggregate(Min("id"))['id__min']
try:
            prev_article = Article.objects.filter(id__lt=article.id).order_by("-id")[0:1].get().slug
except Exception as error:
print(error)
prev_article = Article.objects.aggregate(Max("id"))['id__max']
if article:
response_data = ArticleSerializer(article, context={
"hash_tags_list": True, 'has_subscribed': has_subscribed}).data
if not user.is_anonymous:
book_mark_article = BookmarkArticle.objects.filter(
user=user, article=article).first()
like_article = ArticleLike.objects.filter(
user=user, article=article).first()
if book_mark_article:
response_data["isBookMark"] = True
else:
response_data["isBookMark"] = False
if like_article:
response_data["isLike"] = like_article.is_like
else:
response_data["isLike"] = 2
return Response(create_response({
"article": response_data, "next_article": next_article, "prev_article": prev_article}))
raise NoarticleFound
def post(self, request, *args, **kwargs):
if request.user.is_authenticated:
article_id = self.request.POST.get("article_id", "")
is_like = self.request.POST.get("isLike", "")
user = self.request.user
article = Article.objects.filter(id=article_id).first()
if article:
if is_like and int(is_like) <= 2:
article_like, created = ArticleLike.objects.get_or_create(
user=user, article=article)
article_like.is_like = is_like
article_like.save()
serializer = ArticleLikeSerializer(article_like)
return Response(create_response({
"Msg": "Article like status changed", "article": serializer.data
}))
else:
return Response(create_error_response({
"Msg": "Invalid Input"
}))
else:
return Response(create_error_response({"Msg": "News doesn't exist"}), status=400)
raise Http404
class ArticleBookMarkAPIView(APIView):
permission_classes = (IsAuthenticated,)
def post(self, request, *args, **kwargs):
if request.data:
article_id = request.data["article_id"]
else:
article_id = self.request.POST.get("article_id", "")
user = self.request.user
if article_id:
article = Article.objects.filter(id=article_id).first()
if article:
bookmark_article, created = \
BookmarkArticle.objects.get_or_create(user=user,
article=article)
if not created:
del_bookmark_article = BookmarkArticleSerializer(bookmark_article)
del_bookmark = del_bookmark_article.data
del_bookmark["status"] = 0
bookmark_article.delete()
return Response(create_response({
"Msg": "Article removed from bookmark list", "bookmark_article": del_bookmark
}))
else:
bookmark_article = BookmarkArticleSerializer(bookmark_article)
return Response(create_response({
"Msg": "Article bookmarked successfully", "bookmark_article": bookmark_article.data
}))
raise NoarticleFound
class ArticleRecommendationsAPIView(APIView):
permission_classes = (AllowAny,)
def format_response(self, response):
results = []
if response['hits']['hits']:
for result in response['hits']['hits']:
results.append(result["_source"])
return results
def get(self, request, *args, **kwargs):
article_id = self.kwargs.get("article_id", "")
if article_id:
results = es.search(index='recommendation', body={"query": {"match": {"id": int(article_id)}}})
if results['hits']['hits']:
recommendation = results['hits']['hits'][0]['_source']['recommendation']
search_results = es.search(index='article', body={
"query": {"terms": {"id": recommendation}}, "size": 25})
return Response(create_response({
"results": self.format_response(search_results)
}))
return Response(create_error_response({
"Msg": "Error generating recommendation"
}))
class ForgotPasswordAPIView(APIView):
permission_classes = (AllowAny,)
    def generate_password(self, password_length=10):
        """
        Returns a random password of length password_length.
        """
random = str(uuid.uuid4())
random = random.upper()
random = random.replace("-", "")
return random[0:password_length]
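    # Illustrative: str(uuid.uuid4()) gives e.g. "3f2b0c1e-...", so after
    # upper-casing and stripping dashes, 32 hex characters remain and the
    # first password_length of them are returned.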
def send_mail_to_user(self, email, password, first_name="", last_name=""):
username = first_name + " " + last_name
email_subject = 'NewsPost: Forgot Password Request'
email_body = """
<html>
<head>
</head>
<body>
<p>
Hello """ + username + """,<br><br><b>
""" + password + """</b> is your new password
<br>
<br>
Thanks,<br>
The NewsPost Team<br>
</p>
</body>
</html>"""
msg = EmailMultiAlternatives(
email_subject, '', settings.EMAIL_FROM, [email])
ebody = email_body
msg.attach_alternative(ebody, "text/html")
msg.send(fail_silently=False)
def post(self, request, *args, **kwargs):
email = request.data["email"]
if email:
user = BaseUserProfile.objects.filter(email=email)
if user:
user = user.first()
                password = self.generate_password()
self.send_mail_to_user(
email, password, user.first_name, user.last_name)
user.set_password(password)
user.save()
return Response(create_response({
"Msg": "New password sent to your email"
}))
return Response(create_error_response({
"Msg": "Email Does Not Exist"
}))
class ChangePasswordAPIView(APIView):
permission_classes = (IsAuthenticated,)
def post(self, request, *args, **kwargs):
if request.data:
password = request.data["password"]
old_password = request.data["old_password"]
confirm_password = request.data["confirm_password"]
else:
password = self.request.POST.get("password", "")
old_password = self.request.POST.get("old_password", "")
confirm_password = self.request.POST.get("confirm_password", "")
user = self.request.user
if old_password:
if not user.check_password(old_password):
msg = "Old Password Does Not Match With User"
return Response(create_error_response({
"Msg": msg, "field": "old_password"
}))
if confirm_password != password:
msg = "Password and Confirm Password does not match"
return Response(create_error_response({
"Msg": msg, "field": "confirm_password"
}))
if old_password == password:
msg = "New password should not same as Old password"
return Response(create_error_response({
"Msg": msg, "field": "password"
}))
if user and password:
user.set_password(password)
user.save()
return Response(create_response({
"Msg": "Password changed successfully", "field": "confirm_password"
}))
else:
return Response(create_error_response({
"Msg": "Password field is required", "field": "password"
}))
else:
return Response(create_error_response({
"Msg": "Old Password field is required", "field": "old_password"
}))
class BookmarkArticleAPIView(APIView):
"""
This class is used to get user bookmark list
"""
permission_classes = (IsAuthenticated,)
def get(self, request):
user = self.request.user
bookmark_list = BookmarkArticleSerializer(BookmarkArticle.objects.filter(user=user), many=True)
return Response(create_response({"results": bookmark_list.data}))
class ArticleLikeAPIView(APIView):
"""
This class is used to get user articles
"""
permission_classes = (IsAuthenticated,)
def get(self, request):
like_list = [0, 1]
user = self.request.user
article_list = ArticleLikeSerializer(ArticleLike.objects.filter(user=user, is_like__in=like_list), many=True)
return Response(create_response({"results": article_list.data}))
class HashTagAPIView(ListAPIView):
serializer_class = HashTagSerializer
permission_classes = (AllowAny,)
def get_queryset(self):
weekly = self.request.GET.get("weekly", "")
monthly = self.request.GET.get("monthly", "")
end = datetime.utcnow()
        ist = pytz.timezone('Asia/Kolkata')
        end = ist.localize(end)
utc = pytz.UTC
end = end.astimezone(utc)
articles = Article.objects.all()
queryset = HashTag.objects.all()
if weekly:
weekly = int(weekly)
start = end - timedelta(days=7 * weekly)
hash_tags = articles.filter(published_on__range=(start, end)).values(
'hash_tags__name').annotate(count=Count('hash_tags')).order_by('-count')[:10]
for hashtag in hash_tags:
hashtag['name'] = hashtag.pop('hash_tags__name')
queryset = hash_tags
if monthly:
monthly = int(monthly)
start = end - timedelta(days=30 * monthly)
hash_tags = articles.filter(published_on__range=(start, end)).values(
'hash_tags__name').annotate(count=Count('hash_tags')).order_by('-count')[:10]
for hashtag in hash_tags:
hashtag['name'] = hashtag.pop('hash_tags__name')
queryset = hash_tags
if not weekly and not monthly:
start = end - timedelta(days=1)
hash_tags = articles.filter(published_on__range=(start, end)).values(
'hash_tags__name').annotate(count=Count('hash_tags')).order_by('-count')[:10]
for hashtag in hash_tags:
hashtag['name'] = hashtag.pop('hash_tags__name')
queryset = hash_tags
return queryset
def list(self, request, *args, **kwargs):
queryset = self.filter_queryset(self.get_queryset())
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(page, many=True)
if serializer.data:
paginated_response = self.get_paginated_response(serializer.data)
return Response(create_response(paginated_response.data))
else:
return Response(create_error_response({"Msg": "No trending tags"}), status=400)
serializer = self.get_serializer(queryset, many=True)
return Response(create_response(serializer.data))
class ArticleSearchAPI(APIView):
"""
this view is used for article search and filter
"""
permission_classes = (AllowAny,)
def format_response(self, response):
results = []
filters = {}
if response.hits.hits:
for result in response.hits.hits:
source = result["_source"]
if 'highlight' in result:
if 'title' in result['highlight']:
source['title'] = " ".join(result['highlight']['title'])
if 'blurb' in result['highlight']:
source['blurb'] = " ".join(result['highlight']['blurb'])
results.append(source)
if response.aggregations.category.buckets:
filters["category"] = sorted(
response.aggregations.category.buckets._l_,
key=operator.itemgetter("key"))
if response.aggregations.source.buckets:
filters["source"] = sorted(
response.aggregations.source.buckets._l_,
key=operator.itemgetter("key"))
if response.aggregations.hash_tags.buckets:
filters["hash_tags"] = sorted(
response.aggregations.hash_tags.buckets._l_,
key=operator.itemgetter("key"))
return results, filters
def get(self, request):
page = self.request.GET.get("page", "1")
if page.isdigit():
page = int(page)
else:
page = 1
size = self.request.GET.get("rows", "20")
if size.isdigit():
size = int(size)
else:
size = 20
query = self.request.GET.get("q", "")
source = self.request.GET.getlist("source", [])
category = self.request.GET.getlist("category", [])
domain = self.request.GET.getlist("domain", [])
tags = self.request.GET.getlist("tag", [])
sort = self.request.GET.get("sort", "desc")
if not domain:
return Response(create_serializer_error_response({"domain": ["Domain id is required"]}))
        # more like this for related queries
        mlt_fields = ["hash_tags"]
        if source:
            mlt_fields = ["hash_tags", "source", "domain"]
mlt = Search(using=es, index="article").query("more_like_this", fields=mlt_fields,
like=query, min_term_freq=1, max_query_terms=12).source(mlt_fields)
mlt.execute()
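        # Illustrative note: more_like_this selects documents whose listed
        # fields share statistically interesting terms with `like`;
        # min_term_freq and max_query_terms bound which terms are considered.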
sr = Search(using=es, index="article")
# highlight title and blurb containing query
sr = sr.highlight("title", "blurb", fragment_size=20000)
# generate elastic search query
must_query = [{"wildcard": {"cover_image": "*"}}]
should_query = []
if query:
query = query.lower()
must_query.append({"multi_match": {"query": query,
"fields": ["title", "blurb"], 'type': 'phrase'}})
if tags:
tags = [tag.lower().replace("-", " ") for tag in tags]
for tag in tags:
sq = {"match_phrase": {"hash_tags": tag}}
should_query.append(sq)
if must_query:
sr = sr.query("bool", must=must_query)
if should_query:
if len(should_query) > 1:
sr = sr.filter("bool", should=should_query)
else:
sr = sr.filter("bool", should=should_query[0])
if domain:
sr = sr.filter("terms", domain=list(domain))
if category:
cat_objs = Category.objects.filter(name__in=category)
category = cat_objs.values_list("id", flat=True)
cat_assn_objs = CategoryAssociation.objects.filter(
parent_cat__in=cat_objs).values_list(
"child_cat__id", flat=True)
if cat_assn_objs:
new_category = set(list(cat_assn_objs) + list(category))
sr = sr.filter("terms", category_id=list(new_category))
else:
if category:
sr = sr.filter("terms", category_id=list(category))
if source:
source = [s.lower() for s in source]
sr = sr.filter("terms", source__keyword=source)
sr = sr.sort({"article_score": {"order": sort}})
sr = sr.sort({"published_on": {"order": sort}})
# pagination
start = (page - 1) * size
end = start + size
sr = sr[start:end]
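        # e.g. page=3, size=20 -> start=40, end=60; elasticsearch-dsl turns
        # this slice into the query's "from" and "size" parameters.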
# generate facets
sr.aggs.bucket("category", "terms", field="category.keyword")
sr.aggs.bucket("source", "terms", field="source.keyword")
sr.aggs.bucket("hash_tags", "terms", field="hash_tags.keyword", size=50)
# execute query
response = sr.execute()
results, filters = self.format_response(response)
count = response["hits"]["total"]
total_pages = math.ceil(count / size)
url = request.build_absolute_uri()
if end < count:
next_page = page + 1
next_url = replace_query_param(url, "page", next_page)
else:
next_url = None
if page != 1:
previous_page = page - 1
previous_url = replace_query_param(url, "page", previous_page)
else:
previous_url = None
data = {
"results": results,
"filters": filters,
"count": count,
"total_pages": total_pages,
"current_page": page,
"next": next_url,
"previous": previous_url
}
return Response(create_response(data))
class MenuAPIView(APIView):
"""
This Api will return all the menus
"""
permission_classes = (AllowAny,)
def get(self, request):
domain_id = self.request.GET.get("domain")
if not domain_id:
return Response(create_error_response({"domain": ["Domain id is required"]}))
domain = Domain.objects.filter(domain_id=domain_id).first()
if not domain:
return Response(create_error_response({"domain": ["Domain id is required"]}))
menus = MenuSerializer(Menu.objects.filter(domain=domain), many=True)
menus_list = menus.data
new_menulist = []
for menu in menus_list:
menu_dict = {}
menu_dict['heading'] = menu
new_menulist.append(menu_dict)
return Response(create_response({'results': new_menulist}))
class DevicesAPIView(APIView):
"""
this api will add device_id and device_name
"""
permission_classes = (IsAuthenticated,)
def post(self, request, *args, **kwargs):
user = self.request.user
device_id = self.request.POST.get("device_id", "")
device_name = self.request.POST.get("device_name", "")
if not user.is_anonymous and device_id and device_name:
user_device = Devices.objects.filter(user=user.pk)
if user_device:
user_device.update(device_id=device_id, device_name=device_name, user=user.id)
return Response(create_response({"Msg": "Device successfully created"}))
elif not user_device:
get, created = Devices.objects.get_or_create(device_id=device_id, device_name=device_name, user=user.id)
if created:
return Response(create_response({"Msg": "Device successfully created"}))
else:
return Response(create_response({"Msg": "Device already exist"}))
elif device_id and device_name:
get, created = Devices.objects.get_or_create(device_id=device_id, device_name=device_name)
if created:
return Response(create_response({"Msg": "Device successfully created"}))
else:
return Response(create_response({"Msg": "Device already exist"}))
else:
return Response(create_error_response({"Msg": "device_id and device_name field are required"}))
class NotificationAPIView(APIView):
"""
this api will add notification data
"""
permission_classes = (AllowAny,)
def post(self, request):
device_id = request.data["device_id"]
device_name = request.data["device_name"]
breaking_news = request.data["breaking_news"]
daily_edition = request.data["daily_edition"]
personalized = request.data["personalized"]
        device = Devices.objects.filter(device_id=device_id, device_name=device_name).first()
if breaking_news and daily_edition and personalized and device:
notification = Notification.objects.filter(device=device)
if notification:
notification.update(breaking_news=breaking_news, daily_edition=daily_edition, personalized=personalized)
return Response(create_response({"Msg": "Notification updated successfully"}))
Notification.objects.create(breaking_news=breaking_news, daily_edition=daily_edition,
personalized=personalized, device=device)
return Response(create_response({"Msg": "Notification created successfully"}))
else:
return Response(
create_error_response(
{"Msg": "device_id, device_name, breaking_news, daily_edition and personalized are required"}))
def get(self, request):
device_id = request.GET.get("device_id")
device_name = request.GET.get("device_name")
device = Devices.objects.filter(device_id=device_id, device_name=device_name).first()
if device:
            notification = NotificationSerializer(Notification.objects.filter(device=device), many=True)
return Response(create_response(notification.data))
return Response(create_error_response({"Msg": "Invalid device_id or device_name"}))
class SocialLoginView(generics.GenericAPIView):
"""
this view is used for google social authentication and login
"""
permission_classes = (AllowAny,)
serializer_class = BaseUserProfileSerializer
def decode_google_token(self, token_id):
"""
this method is used to decode and verify google token
"""
request = grequests.Request()
try:
id_info = id_token.verify_oauth2_token(token_id, request)
return id_info
except Exception as e:
log.debug("error in google token verification {0}".format(e))
return False
def get_name_details(self, id_info):
"""
        this method is used to get first name and last name from id_info
details
"""
first_name = last_name = ""
if "name" in id_info:
name = id_info.get("name")
name_list = name.split(" ")
first_name = name_list[0]
if len(name_list) > 1:
last_name = " ".join(name_list[1:])
if not first_name:
if "given_name" in id_info:
first_name = id_info.get("given_name")
if not last_name:
if "family_name" in id_info:
last_name = id_info.get("family_name")
return first_name, last_name
def create_user_profile(self, first_name, last_name, username, email, image_url, sid, provider):
"""
this method is used to create base user profile object for given
social account
"""
user = BaseUserProfile.objects.filter(email=email).first()
created = ""
if not user:
user = BaseUserProfile.objects.create(
first_name=first_name,
last_name=last_name,
email=email,
username=username
)
sa_obj, created = SocialAccount.objects.get_or_create(
social_account_id=sid,
image_url=image_url,
user=user,
provider=provider
)
# create_profile_image.delay(sa_obj.id)
return user, created
def get_facebook_data(self, token_id):
"""
this method is used to get facebook user data from given access token
"""
graph = facebook.GraphAPI(access_token=token_id)
try:
res_data = graph.get_object(
id='me?fields=email,id,first_name,last_name,name,picture.width(150).height(150)')
return res_data
except Exception as e:
log.debug("error in facebook fetch data: {0}".format(e))
return False
def get_facebook_name_details(self, profile_data):
"""
this method is used to get facebook first_name last_name from profile
data
"""
name = first_name = last_name = ""
if "first_name" in profile_data:
first_name = profile_data.get("first_name")
if "last_name" in profile_data:
last_name = profile_data.get("last_name")
if "name" in profile_data:
name = profile_data.get("name")
name_list = name.split(" ")
if not first_name:
first_name = name_list[0]
if not last_name:
last_name = " ".join(name[1:])
return first_name, last_name
def get_user_serialize_data(self, email, device_id, device_name):
"""
this method will return customize user data
"""
user = BaseUserProfile.objects.filter(email=email).first()
device = Devices.objects.filter(user=user.id)
if device:
device.update(device_name=device_name, device_id=device_id)
else:
device, created = Devices.objects.get_or_create(device_name=device_name, device_id=device_id)
Devices.objects.filter(pk=device.pk).update(user=user)
        notification_obj, _ = Notification.objects.get_or_create(device=device)
        notification = NotificationSerializer([notification_obj], many=True)
token, _ = Token.objects.get_or_create(user=user)
data = BaseUserProfileSerializer(user).data
data["token"] = token.key
data["breaking_news"] = notification.data[0]['breaking_news']
data["daily_edition"] = notification.data[0]['daily_edition']
data["personalized"] = notification.data[0]['personalized']
return data
def post(self, request, *args, **kwargs):
"""
this is post method for collection google social auth data
and generate authentication api token for user
"""
token_id = request.data.get("token_id")
provider = request.data.get("provider")
device_id = request.data.get("device_id")
device_name = request.data.get("device_name")
if not token_id:
raise TokenIDMissing()
if not provider:
raise ProviderMissing()
if not device_id:
return Response(create_error_response({"Msg": "device_id is missing or Invalid device_id"}))
if not device_name:
return Response(create_error_response({"Msg": "device_name is missing or Invalid device_name"}))
if provider not in SOCIAL_AUTH_PROVIDERS:
raise ProviderMissing()
if provider == "google":
id_info = self.decode_google_token(token_id)
if not id_info:
raise SocialAuthTokenException()
first_name, last_name = self.get_name_details(id_info)
email = id_info.get("email", "")
if not email:
raise SocialAuthTokenException()
username = email.split("@")[0]
google_id = id_info.get("sub", "")
image_url = id_info.get("picture", "")
user, created = self.create_user_profile(
first_name, last_name, username, email, image_url, google_id, provider)
user_data = self.get_user_serialize_data(email, device_id, device_name)
return Response(create_response({"user": user_data}))
if provider == "facebook":
profile_data = self.get_facebook_data(token_id)
if not profile_data:
raise SocialAuthTokenException()
first_name, last_name = self.get_facebook_name_details(
profile_data)
email = profile_data.get("email")
if not email:
raise SocialAuthTokenException()
            username = email.split("@")[0]
facebook_id = profile_data.get("id", "")
image_url = ""
if "picture" in profile_data:
if "data" in profile_data["picture"]:
image_url = profile_data["picture"]["data"]["url"]
user, created = self.create_user_profile(
first_name, last_name, username, email, image_url, facebook_id, provider)
user_data = self.get_user_serialize_data(email, device_id, device_name)
return Response(create_response({"user": user_data}))
raise ProviderMissing()
class TrendingArticleAPIView(APIView):
permission_classes = (AllowAny,)
def get(self, request, format=None, *args, **kwargs):
"""
List all the trending articles
"""
domain_id = self.request.GET.get("domain")
if not domain_id:
return Response(create_error_response({"domain": ["Domain id is required"]}))
domain = Domain.objects.filter(domain_id=domain_id).first()
if not domain:
return Response(create_error_response({"domain": ["Invalid domain name"]}))
source = TrendingArticleSerializer(TrendingArticle.objects.filter(domain=domain), many=True)
return Response(create_response({"results": source.data}))
class SocialMediaPublishing:
    """
    this class is used to update news articles on social media
    """
def twitter(self, data):
"""
this function will tweet article title and its url in twitter
"""
try:
auth = tweepy.OAuthHandler(settings.TWITTER_CONSUMER_KEY, settings.TWITTER_CONSUMER_SECRET)
auth.set_access_token(settings.TWITTER_ACCESS_TOKEN, settings.TWITTER_ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
api.update_status(data["title"] + "\n" + data["url"])
except Exception as e:
print("Error in twitter post: ", e)
class ArticleCreateUpdateView(APIView, SocialMediaPublishing):
"""
Article create update view
"""
permission_classes = (IsAuthenticated,)
def get_tags(self, tags):
"""
this method will return tag name from tags objects
"""
tag_list = []
for tag in tags:
tag_list.append(tag["name"])
return tag_list
def publish(self, obj):
serializer = ArticleSerializer(obj)
json_data = serializer.data
if json_data["hash_tags"]:
tag_list = self.get_tags(json_data["hash_tags"])
json_data["hash_tags"] = tag_list
ingest_to_elastic([json_data], "article", "article", "id")
tweet_data = {
"title": serializer.instance.title,
"url": serializer.instance.source_url,
}
self.twitter(tweet_data)
def post(self, request):
publish = request.data.get("publish")
# origin is used to join with cover image
# to generate proper image url
origin = request.META.get("HTTP_ORIGIN")
cover_image_id = request.data.get("cover_image_id")
if cover_image_id:
DraftMedia.objects.filter(id=cover_image_id).delete()
if not request.data.get("cover_image"):
request.data["cover_image"] = "/".join(
[origin, request.user.domain.default_image.url])
context = {"publish": publish, "user": request.user}
serializer = ArticleCreateUpdateSerializer(
data=request.data, context=context)
if serializer.is_valid():
serializer.save()
if publish:
self.publish(serializer.instance)
return Response(create_response(serializer.data))
return Response(create_error_response(serializer.errors), status=400)
def put(self, request):
_id = request.data.get("id")
publish = request.data.get("publish")
# origin is used to join with cover image
# to generate proper image url
origin = request.META.get("HTTP_ORIGIN")
cover_image_id = request.data.get("cover_image_id")
if cover_image_id:
DraftMedia.objects.filter(id=cover_image_id).delete()
if not request.data.get("cover_image"):
request.data["cover_image"] = "/".join(
[origin, request.user.domain.default_image.url])
context = {"publish": publish, "user": request.user}
article = Article.objects.get(id=_id)
serializer = ArticleCreateUpdateSerializer(
article, data=request.data, context=context)
if serializer.is_valid():
serializer.save()
if publish:
self.publish(serializer.instance)
return Response(create_response(serializer.data))
return Response(create_error_response(serializer.errors), status=400)
class ChangeArticleStatusView(APIView, SocialMediaPublishing):
"""
this view is used to update status of given article activate or deactivate
"""
permission_classes = (IsAuthenticated,)
def get_tags(self, tags):
"""
this method will return tag name from tags objects
"""
tag_list = []
for tag in tags:
tag_list.append(tag["name"])
return tag_list
def publish(self, obj):
serializer = ArticleSerializer(obj)
json_data = serializer.data
if obj.active:
if json_data["hash_tags"]:
tag_list = self.get_tags(json_data["hash_tags"])
json_data["hash_tags"] = tag_list
ingest_to_elastic([json_data], "article", "article", "id")
tweet_data = {
"title": serializer.instance.title,
"url": serializer.instance.source_url,
}
self.twitter(tweet_data)
else:
delete_from_elastic([json_data], "article", "article", "id")
def post(self, request):
_id = request.data.get("id")
article = Article.objects.filter(id=_id).first()
if not article:
return Response(create_error_response({"error": "Article does not exists"}), status=400)
article.active = request.data.get("activate")
article.save()
self.publish(article)
return Response(create_response({
"id": article.id, "active": article.active}))
class CategoryBulkUpdate(APIView):
"""
update whole bunch of articles in one go
"""
permission_classes = (IsAuthenticated,)
def get_tags(self, tags):
"""
this method will return tag name from tags objects
"""
tag_list = []
for tag in tags:
tag_list.append(tag["name"])
return tag_list
def post(self, request):
category_id = request.data['categories']
category = Category.objects.get(id=category_id)
for article_id in request.data['articles']:
current = Article.objects.get(id=article_id)
current.category = category
current.save()
serializer = ArticleSerializer(current)
json_data = serializer.data
delete_from_elastic([json_data], "article", "article", "id")
if json_data["hash_tags"]:
tag_list = self.get_tags(json_data["hash_tags"])
json_data["hash_tags"] = tag_list
ingest_to_elastic([json_data], "article", "article", "id")
return Response({"ok": "cool"})
class GetDailyDigestView(ListAPIView):
serializer_class = ArticleSerializer
permission_classes = (AllowAny,)
def format_response(self, response):
results = []
if response.hits.hits:
for result in response.hits.hits:
results.append(result["_source"])
return results
def get_queryset(self):
device_id = self.request.GET.get("device_id", "")
queryset = Devices.objects.filter(device_id=device_id)
dd = DailyDigest.objects.filter(device__in=queryset)
if not queryset.exists() or not dd.exists():
return []
return dd.first().articles.all().order_by("-published_on")
def list(self, request, *args, **kwargs):
queryset = self.get_queryset()
if not queryset:
sr = Search(using=es, index="article")
sort = "desc"
sr = sr.sort({"article_score": {"order": sort}})
sr = sr.sort({"published_on": {"order": sort}})
sr = sr[0:20]
response = sr.execute()
results = self.format_response(response)
return Response(create_response({"results": results}))
serializer = self.get_serializer(queryset, many=True)
if serializer.data:
return Response(create_response(serializer.data))
else:
return Response(create_error_response({"Msg": "Daily Digest Doesn't Exist"}), status=400)
class DraftMediaUploadViewSet(viewsets.ViewSet):
"""
this view is used to upload article images
"""
permission_classes = (IsAuthenticated,)
def create(self, request):
image_file = request.data.get("image")
if not image_file:
return Response(create_error_response({"error": "Image file is required."}))
draft_image = DraftMedia.objects.create(image=image_file)
serializer = DraftMediaSerializer(draft_image)
return Response(create_response(serializer.data))
def update(self, request, pk):
image_file = request.data.get("image")
if not image_file:
return Response(create_error_response({"error": "Image file is required."}))
        draft_image = DraftMedia.objects.filter(id=pk).first()
        if not draft_image:
            raise Http404
draft_image.image = image_file
draft_image.save()
serializer = DraftMediaSerializer(draft_image)
return Response(create_response(serializer.data))
def destroy(self, request, pk):
        draft_image = DraftMedia.objects.filter(id=pk).first()
        if not draft_image:
            raise Http404
draft_image.delete()
return Response(create_response({"Msg": "Image deleted successfully"}))
class CommentViewSet(viewsets.ViewSet):
serializer_class = CommentSerializer
permission_classes = (IsAuthenticated,)
pagination_class = PostpageNumberPagination
ordering = "-created_at"
def get_permissions(self):
"""
Instantiates and returns the list of permissions that this view requires.
"""
if self.action == 'list':
self.permission_classes = [AllowAny]
else:
self.permission_classes = [IsAuthenticated]
return [permission() for permission in self.permission_classes]
def create(self, request):
captcha_response_key = 0
captcha_key = request.data.get("captcha_key")
captcha_value = request.data.get("captcha_value")
captcha = CaptchaStore.objects.filter(hashkey=captcha_key).first()
if not captcha:
return Response(create_error_response({"error": "Invalid Captcha"}))
if captcha.response != captcha_value.lower():
return Response(create_error_response({"error": "Invalid Captcha"}))
data = request.data.copy()
data["user"] = request.user.id
serializer = CommentSerializer(data=data)
if serializer.is_valid():
serializer.save()
return Response(create_response({"result": serializer.data}))
return Response(create_error_response({"error": "Enter Valid data"}))
def list(self, request):
article_id = request.GET.get("article_id", "")
if not article_id:
return Response(
create_error_response(
{"error": "Article ID has not been entered by the user"}
)
)
article_obj = Article.objects.filter(id=article_id).first()
if not article_obj:
return Response(create_error_response({"error": "Article does not exist"})
)
comment_list = Comment.objects.filter(article=article_obj, reply=None)
serializer = CommentSerializer(comment_list, many=True)
return Response(
create_response(
{"results": serializer.data, "total_article_likes": ArticleLike.objects.filter(
article=article_obj).count()}))
def destroy(self, request, pk):
comment_obj = Comment.objects.filter(id=pk)
if not comment_obj:
return Response(create_error_response({"error": "Comment does not exist"}))
comment_obj.delete()
return Response(create_response({"Msg": "Comment deleted successfully"}))
class LikeAPIView(APIView):
permission_classes = (IsAuthenticated,)
pagination_class = PostpageNumberPagination
ordering = "-created_at"
def post(self, request):
post_data = request.data.copy()
post_data["user"] = request.user.id
serializer = ArticleLikeSerializer(data=post_data)
if serializer.is_valid():
serializer.save()
if serializer.data.get("id"):
return Response(create_response({"Msg": "Liked"}))
return Response(create_response({"Msg": "Removed Like"}))
return Response(create_error_response({"error": "Invalid Data Entered"}))
class CaptchaCommentApiView(APIView):
permission_classes = (IsAuthenticated,)
def get(self, request):
        captcha_len = CaptchaStore.objects.count()
if captcha_len > 500:
captcha = CaptchaStore.objects.order_by('?')[:1]
to_json_response = dict()
to_json_response['status'] = 1
to_json_response['new_captch_key'] = captcha[0].hashkey
to_json_response['new_captch_image'] = captcha_image_url(to_json_response['new_captch_key'])
return Response(create_response({"result": to_json_response}))
else:
to_json_response = dict()
to_json_response['status'] = 1
to_json_response['new_captch_key'] = CaptchaStore.generate_key()
to_json_response['new_captch_image'] = captcha_image_url(to_json_response['new_captch_key'])
return Response(create_response({"result": to_json_response}))
class AutoCompleteAPIView(generics.GenericAPIView):
permission_classes = (AllowAny,)
def format_response(self, response):
results = []
if response['hits']['hits']:
for result in response['hits']['hits']:
results.append(result["_source"])
return results
def get(self, request):
result_list = []
if request.data:
query = request.data["q"]
else:
query = request.GET.get("q", "")
if query:
results = es.search(
index="auto_suggestions",
body={
"suggest": {
"results": {
"text": query,
"completion": {"field": "name_suggest"},
}
}
},
)
results = results['suggest']['results'][0]['options']
if results:
for result in results:
result_list.append(
{
"value": result["_source"]["name_suggest"],
"key": result["_source"]["desc"],
}
)
return Response(create_response({"result": result_list}))
return Response(create_response({"result": []}))
class SubsAPIView(ListAPIView):
serializer_class = SubsMediaSerializer
permission_classes = (AllowAny,)
pagination_class = PostpageNumberPagination
def get(self, request):
q = self.request.GET.get("q", None)
subs = Subscription.objects.all()
if q:
subs = subs.filter(user__email__icontains=q)
source = SubsMediaSerializer(subs, many=True)
return Response(create_response({"results": source.data}))
class UpdateSubsAPIView(APIView):
serializer_class = SubsMediaSerializer
permission_classes = (AllowAny,)
def get(self, request, pk):
source = SubsMediaSerializer(Subscription.objects.get(id=pk))
return Response(create_response({"results": source.data}))
def post(self, request, *args, **kwargs):
subs_id = self.request.POST.get('id')
subs = Subscription.objects.filter(id=subs_id)
if subs.exists():
subs = subs.first()
subs.subs_type = self.request.POST.get('subs_type')
auto_renew = self.request.POST.get('auto_renew')
if auto_renew == 'No':
subs.auto_renew = False
else:
subs.auto_renew = True
subs.save()
return Response(create_response({"results": "success"}))
return Response(create_response({"results": "error"}))
class UserProfileAPIView(APIView):
permission_classes = (IsAuthenticated, )
def get(self, request, *args, **kwargs):
user = BaseUserProfile.objects.filter(id=self.request.user.id).first()
serializer = UserProfileSerializer(user)
data = serializer.data
response_data = create_response({"user": data})
return Response(response_data)
def put(self, request, format=None):
if request.user.is_authenticated:
if request.data:
_id = request.data["id"]
else:
_id = self.request.POST.get('id')
user = BaseUserProfile.objects.get(id=_id)
serializer = UserProfileSerializer(user, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(create_response({"result":serializer.data, "Msg":"Profile updated successfully."}))
return Response(create_error_response(serializer.errors), status=400)
raise Http404
class AccessSession(APIView):
permission_classes = (AllowAny,)
def get(self, request):
print(request.META.items())
request.session["ip"] = request.META.get('REMOTE_ADDR')
return Response(create_response({"results": request.session._session_key}))
class RSSAPIView(APIView):
permission_classes = (AllowAny,)
def get(self, request):
data = {}
domain = request.GET.get("domain")
if domain:
domain_obj = Domain.objects.filter(domain_id=domain).first()
if domain_obj:
menus = Menu.objects.filter(domain=domain_obj)
for menu in menus:
all_categories = menu.submenu.all()
for category in all_categories:
data[category.name.name] = "/article/rss/?domain=" + domain + "&category=" + category.name.name
return Response(create_response({"results": data}))
return Response(create_error_response({"error": "Domain do not exist."}))
return Response(create_error_response({"error": "Domain is required"})) | |
constans.js | export const urlDababaseCollectionMainPage =
"https://applestoredb.firebaseio.com/collectionMainPage.json";
export const urlDatabaseCollectionName =
"https://applestoredb.firebaseio.com/collectionNames.json";
export const urlDatabaseBuy = "https://applestoredb.firebaseio.com/orders.json"; | export const urlDatabase = "https://applestoredb.firebaseio.com/"; |
|
pconn.go | package main
import (
"net"
"time"
)
const (
	tcpDialTimeout = 10 * time.Second
	// tcpKeepAlivePeriod is referenced in connect() but not defined in this
	// file; a typical keep-alive interval is assumed here.
	tcpKeepAlivePeriod = 3 * time.Minute
)
// A PConn is a persistent TCP connection. It opens a connection lazily when
// used and reopens the connection on errors.
// It can also be thought of as a connection pool of size 1.
type PConn struct {
c *net.TCPConn
addr string
}
func DialPConn(addr string) *PConn |
func (c *PConn) connect() error {
conn, err := net.DialTimeout("tcp", c.addr, tcpDialTimeout)
if err != nil {
return err
}
c.c = conn.(*net.TCPConn)
if err := c.c.SetKeepAlivePeriod(tcpKeepAlivePeriod); err != nil {
return err
}
return c.c.SetKeepAlive(true)
}
func (c *PConn) Write(b []byte) (int, error) {
// For now, just do one retry -- we could introduce more with backoff,
// etc later.
hadConn := c.c != nil
if c.c == nil {
if err := c.connect(); err != nil {
return 0, err
}
}
n, err := c.c.Write(b)
if err != nil {
// TODO: I could convert to net.Error and check Timeout() and/or
// Temporary() -- is that useful?
if hadConn {
c.c.Close()
c.c = nil
return c.Write(b)
}
}
return n, err
}
func (c *PConn) Close() error {
if c.c != nil {
return c.c.Close()
}
return nil
}
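// Usage sketch (illustrative; the address is hypothetical):
//
//	c := DialPConn("collector.example.com:2003")
//	defer c.Close()
//	if _, err := c.Write([]byte("cpu.load 0.42\n")); err != nil {
//		// connect() failed, or the write failed even after one reconnect
//	}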
| {
return &PConn{addr: addr}
} |
zz_generated.deepcopy.go | //go:build !ignore_autogenerated
// +build !ignore_autogenerated
/*
Copyright (c) 2021 SAP SE or an SAP affiliate company. All rights reserved. This file is licensed under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by deepcopy-gen. DO NOT EDIT.
package v1alpha1
import (
corev1alpha1 "github.com/gardener/gardener/pkg/apis/core/v1alpha1"
v1 "k8s.io/api/core/v1"
runtime "k8s.io/apimachinery/pkg/runtime"
)
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *Bastion) DeepCopyInto(out *Bastion) {
*out = *in
out.TypeMeta = in.TypeMeta
in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
in.Spec.DeepCopyInto(&out.Spec)
in.Status.DeepCopyInto(&out.Status)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Bastion.
func (in *Bastion) DeepCopy() *Bastion {
if in == nil {
return nil
}
out := new(Bastion)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *Bastion) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *BastionIngressPolicy) DeepCopyInto(out *BastionIngressPolicy) {
*out = *in
in.IPBlock.DeepCopyInto(&out.IPBlock)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BastionIngressPolicy.
func (in *BastionIngressPolicy) DeepCopy() *BastionIngressPolicy {
if in == nil {
return nil
}
out := new(BastionIngressPolicy)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *BastionList) DeepCopyInto(out *BastionList) {
*out = *in
out.TypeMeta = in.TypeMeta
in.ListMeta.DeepCopyInto(&out.ListMeta)
if in.Items != nil {
in, out := &in.Items, &out.Items
*out = make([]Bastion, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BastionList.
func (in *BastionList) DeepCopy() *BastionList {
if in == nil {
return nil
}
out := new(BastionList)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *BastionList) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *BastionSpec) DeepCopyInto(out *BastionSpec) {
*out = *in
out.ShootRef = in.ShootRef
if in.SeedName != nil {
in, out := &in.SeedName, &out.SeedName
*out = new(string)
**out = **in
}
if in.ProviderType != nil {
in, out := &in.ProviderType, &out.ProviderType
*out = new(string)
**out = **in
}
if in.Ingress != nil {
in, out := &in.Ingress, &out.Ingress
*out = make([]BastionIngressPolicy, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BastionSpec.
func (in *BastionSpec) DeepCopy() *BastionSpec {
if in == nil {
return nil
}
out := new(BastionSpec)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *BastionStatus) DeepCopyInto(out *BastionStatus) {
*out = *in
if in.Ingress != nil {
in, out := &in.Ingress, &out.Ingress
*out = new(v1.LoadBalancerIngress)
(*in).DeepCopyInto(*out)
}
if in.Conditions != nil |
if in.LastHeartbeatTimestamp != nil {
in, out := &in.LastHeartbeatTimestamp, &out.LastHeartbeatTimestamp
*out = (*in).DeepCopy()
}
if in.ExpirationTimestamp != nil {
in, out := &in.ExpirationTimestamp, &out.ExpirationTimestamp
*out = (*in).DeepCopy()
}
if in.ObservedGeneration != nil {
in, out := &in.ObservedGeneration, &out.ObservedGeneration
*out = new(int64)
**out = **in
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BastionStatus.
func (in *BastionStatus) DeepCopy() *BastionStatus {
if in == nil {
return nil
}
out := new(BastionStatus)
in.DeepCopyInto(out)
return out
}
| {
in, out := &in.Conditions, &out.Conditions
*out = make([]corev1alpha1.Condition, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
} |
monitor_private_link_scope_resource_test.go | package monitor_test
import (
"context"
"fmt"
"testing"
"github.com/hashicorp/terraform-provider-azurerm/internal/acceptance"
"github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check"
"github.com/hashicorp/terraform-provider-azurerm/internal/clients"
"github.com/hashicorp/terraform-provider-azurerm/internal/services/monitor/parse"
"github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk"
"github.com/hashicorp/terraform-provider-azurerm/utils"
)
type MonitorPrivateLinkScopeResource struct{}
func TestAccMonitorPrivateLinkScope_basic(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_monitor_private_link_scope", "test")
r := MonitorPrivateLinkScopeResource{}
data.ResourceTest(t, r, []acceptance.TestStep{
{
Config: r.basic(data),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(),
})
}
func TestAccMonitorPrivateLinkScope_requiresImport(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_monitor_private_link_scope", "test")
r := MonitorPrivateLinkScopeResource{}
data.ResourceTest(t, r, []acceptance.TestStep{
{
Config: r.basic(data),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.RequiresImportErrorStep(r.requiresImport),
})
}
func TestAccMonitorPrivateLinkScope_complete(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_monitor_private_link_scope", "test")
r := MonitorPrivateLinkScopeResource{}
data.ResourceTest(t, r, []acceptance.TestStep{
{
Config: r.complete(data, "Test"),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(),
})
}
func TestAccMonitorPrivateLinkScope_update(t *testing.T) |
func (r MonitorPrivateLinkScopeResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) {
id, err := parse.PrivateLinkScopeID(state.ID)
if err != nil {
return nil, err
}
resp, err := client.Monitor.PrivateLinkScopesClient.Get(ctx, id.ResourceGroup, id.Name)
if err != nil {
if utils.ResponseWasNotFound(resp.Response) {
return utils.Bool(false), nil
}
return nil, fmt.Errorf("retrieving %q %+v", id, err)
}
return utils.Bool(resp.AzureMonitorPrivateLinkScopeProperties != nil), nil
}
func (r MonitorPrivateLinkScopeResource) template(data acceptance.TestData) string {
return fmt.Sprintf(`
provider "azurerm" {
features {}
}
resource "azurerm_resource_group" "test" {
name = "acctestRG-pls-%d"
location = "%s"
}
`, data.RandomInteger, data.Locations.Primary)
}
func (r MonitorPrivateLinkScopeResource) basic(data acceptance.TestData) string {
return fmt.Sprintf(`
%s
resource "azurerm_monitor_private_link_scope" "test" {
name = "acctest-ampls-%d"
resource_group_name = azurerm_resource_group.test.name
}
`, r.template(data), data.RandomInteger)
}
func (r MonitorPrivateLinkScopeResource) requiresImport(data acceptance.TestData) string {
return fmt.Sprintf(`
%s
resource "azurerm_monitor_private_link_scope" "import" {
name = azurerm_monitor_private_link_scope.test.name
resource_group_name = azurerm_monitor_private_link_scope.test.resource_group_name
}
`, r.basic(data))
}
func (r MonitorPrivateLinkScopeResource) complete(data acceptance.TestData, tag string) string {
return fmt.Sprintf(`
%s
resource "azurerm_monitor_private_link_scope" "test" {
name = "acctest-AMPLS-%d"
resource_group_name = azurerm_resource_group.test.name
tags = {
ENV = "%s"
}
}
`, r.template(data), data.RandomInteger, tag)
}
| {
data := acceptance.BuildTestData(t, "azurerm_monitor_private_link_scope", "test")
r := MonitorPrivateLinkScopeResource{}
data.ResourceTest(t, r, []acceptance.TestStep{
{
Config: r.complete(data, "Test1"),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(),
{
Config: r.complete(data, "Test2"),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(),
})
} |
ex10_9.py | # Nilo's solution
class Estado:
def __init__(self, nome, sigla):
self.nome = nome
self.sigla = sigla
self.cidades = []
def adiciona_cidades(self, cidade):
cidade.estado = self
self.cidades.append(cidade)
def populacao(self): |
class Cidade:
def __init__(self, nome, populacao):
self.nome = nome
self.populacao = populacao
self.estado = None
def __str__(self):
return f'''Cidade (nome={self.nome}, populacao={self.populacao},
estado={self.estado})'''
# Populations obtained from the Wikipedia website
# IBGE 2012 estimate
am = Estado('Amazonas', 'AM')
am.adiciona_cidades(Cidade('Manaus', 1861838))
am.adiciona_cidades(Cidade('Parintins', 103828))
am.adiciona_cidades(Cidade('Itacoatiara', 89064))
for estado in [am]:
print(f'Estado: {estado.nome} Sigla: {estado.sigla}')
for cidade in estado.cidades:
print(f'Cidade: {cidade.nome}, Populacao: {cidade.populacao}')
print(f'Populacao do Estado: {estado.populacao()}\n') | return sum([c.populacao for c in self.cidades])
runner.go | package main
import (
"bufio"
"fmt"
"io"
"os"
"os/exec"
"path/filepath"
"strings"
"github.com/fatih/color"
)
// Runner builds, runs, stops and restarts SourcePods.
type Runner struct {
name string
env []string
args []string
cmd *exec.Cmd
restart chan bool
}
// NewRunner creates a Runner that can be restarted
func NewRunner(name string, env []string, args []string) *Runner {
return &Runner{
name: name,
env: env,
args: args,
restart: make(chan bool, 16),
}
}
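// A minimal usage sketch (assuming a make target `dev/api` exists):
//
//	r := NewRunner("api", os.Environ(), nil)
//	go func() { _ = r.Run() }() // build, start, then wait for restart signals
//	r.Restart()                 // kill and re-exec ./dev/api without rebuilding
//	r.Shutdown()                // stop the process and end Run's loop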
// Name of the Runner
func (r *Runner) Name() string {
return r.name
}
// Run the command
func (r *Runner) Run() error {
if err := r.Build(); err == nil |
for {
_, more := <-r.restart
if more {
if r.cmd != nil {
r.Stop()
}
go func() {
r.cmd = exec.Command("./dev/"+r.name, r.args...)
r.cmd.Env = r.env
color.HiGreen("%s\n", strings.Join(r.cmd.Args, " "))
stdout, err := r.cmd.StdoutPipe()
if err != nil {
return
}
stderr, err := r.cmd.StderrPipe()
if err != nil {
return
}
multi := io.MultiReader(stdout, stderr)
if r.cmd.Start() != nil {
return
}
scanner := bufio.NewScanner(multi)
for scanner.Scan() {
fmt.Printf("%s\t%s\n", color.HiBlueString(r.name), scanner.Text())
}
if err = r.cmd.Wait(); err != nil {
return
}
}()
} else {
return nil
}
}
}
// Stop the command and process
func (r *Runner) Stop() {
if r.cmd == nil || r.cmd.Process == nil {
return
}
r.cmd.Process.Kill()
}
// Build the binary to be run afterwards. Example: make dev/api
func (r *Runner) Build() error {
cmd := exec.Command("make", "dev/"+r.name)
cmd.Stdin = os.Stdin
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
fmt.Println(strings.Join(cmd.Args, " "))
return cmd.Run()
}
// Restart the program by signaling via the restart channel
func (r *Runner) Restart() {
r.restart <- true
}
// Shutdown the program by closing the restart channel and stopping the process
func (r *Runner) Shutdown() {
close(r.restart)
r.Stop()
}
// CaddyRunner runs caddy
type CaddyRunner struct {
cmd *exec.Cmd
}
// Run Caddy and print its output
func (r *CaddyRunner) Run() error {
r.cmd = exec.Command(filepath.Join(".", "dev", "caddy"), "-conf", "./dev/Caddyfile")
stdout, err := r.cmd.StdoutPipe()
if err != nil {
return err
}
stderr, err := r.cmd.StderrPipe()
if err != nil {
return err
}
multi := io.MultiReader(stdout, stderr)
if err := r.cmd.Start(); err != nil {
return err
}
scanner := bufio.NewScanner(multi)
for scanner.Scan() {
fmt.Printf("%s\t%s\n", color.HiBlueString("caddy"), scanner.Text())
}
return r.cmd.Wait()
}
// Stop the Caddy server
func (r *CaddyRunner) Stop() {
if r.cmd == nil || r.cmd.Process == nil {
return
}
r.cmd.Process.Kill()
}
| {
r.restart <- true
} |
Header.tsx | import React from "react";
import Link from "next/link";
import styled from "styled-components";
import SvgButton from "../button/SvgButton";
import HamburgerIcon from "../svg/icon/HamburgerIcon";
import LymphedemaCenterLogo from "../svg/brand/LymphedemaCenterLogo";
import { scrollToTop } from "../helpers";
const Container = styled.header`
background-color: ${(props) => props.theme.colors.fill.grayscale.c600};
height: 60px;
width: 100%;
padding: 0 ${(props) => props.theme.layout.small.margin};
display: grid;
grid-template-columns: max-content 1fr max-content;
align-items: center;
`;
const InvisSvgButton = styled(SvgButton)`
visibility: hidden;
`;
const LogoLink = styled.a.attrs({
"data-cy": "LogoLink",
})`
height: 34px;
cursor: pointer;
justify-self: center;
`;
const ELymphedemaCenterLogo = styled(LymphedemaCenterLogo)`
justify-self: center;
`;
interface HeaderProps extends React.HTMLAttributes<HTMLDivElement> {
/** The function for the hamburger button. */
hamburgerOnClick: (e: React.MouseEvent<HTMLButtonElement>) => void;
}
/** - Also accepts all `React.HTMLAttributes<HTMLDivElement>` props. */
const Header = (props: HeaderProps) => (
<Container {...props}>
<SvgButton
height="34px" | data-cy="HamburgerButton"
/>
<Link href="/" passHref>
<LogoLink aria-label="Go to the homepage." onClick={scrollToTop}>
<ELymphedemaCenterLogo height="34px" />
</LogoLink>
</Link>
<InvisSvgButton
height="34px"
width="34px"
svg={<HamburgerIcon size="100%" />}
aria-label="HamburgerButton"
/>
</Container>
);
export default Header; | width="34px"
svg={<HamburgerIcon size="100%" />}
aria-label="Open up the application drawer."
onClick={props.hamburgerOnClick} |
regionck.rs | //! The region check is a final pass that runs over the AST after we have
//! inferred the type constraints but before we have actually finalized
//! the types. Its purpose is to embed a variety of region constraints.
//! Inserting these constraints as a separate pass is good because (1) it
//! localizes the code that has to do with region inference and (2) often
//! we cannot know what constraints are needed until the basic types have
//! been inferred.
//!
//! ### Interaction with the borrow checker
//!
//! In general, the job of the borrowck module (which runs later) is to
//! check that all soundness criteria are met, given a particular set of
//! regions. The job of *this* module is to anticipate the needs of the
//! borrow checker and infer regions that will satisfy its requirements.
//! It is generally true that the inference doesn't need to be sound,
//! meaning that if there is a bug and we inferred bad regions, the borrow
//! checker should catch it. This is not entirely true though; for
//! example, the borrow checker doesn't check subtyping, and it doesn't
//! check that region pointers are always live when they are used. It
//! might be worthwhile to fix this so that borrowck serves as a kind of
//! verification step -- that would add confidence in the overall
//! correctness of the compiler, at the cost of duplicating some type
//! checks and effort.
//!
//! ### Inferring the duration of borrows, automatic and otherwise
//!
//! Whenever we introduce a borrowed pointer, for example as the result of
//! a borrow expression `let x = &data`, the lifetime of the pointer `x`
//! is always specified as a region inference variable. `regionck` has the
//! job of adding constraints such that this inference variable is as
//! narrow as possible while still accommodating all uses (that is, every
//! dereference of the resulting pointer must be within the lifetime).
//!
//! #### Reborrows
//!
//! Generally speaking, `regionck` does NOT try to ensure that the data
//! `data` will outlive the pointer `x`. That is the job of borrowck. The
//! one exception is when "re-borrowing" the contents of another borrowed
//! pointer. For example, imagine you have a borrowed pointer `b` with
//! lifetime `L1` and you have an expression `&*b`. The result of this
//! expression will be another borrowed pointer with lifetime `L2` (which is
//! an inference variable). The borrow checker is going to enforce the
//! constraint that `L2 < L1`, because otherwise you are re-borrowing data
//! for a lifetime larger than the original loan. However, without the
//! routines in this module, the region inferencer would not know of this
//! dependency and thus it might infer the lifetime of `L2` to be greater
//! than `L1` (issue #3148).
//!
//! There are a number of troublesome scenarios in the tests
//! `region-dependent-*.rs`, but here is one example:
//!
//! struct Foo { i: i32 }
//! struct Bar { foo: Foo }
//! fn get_i<'a>(x: &'a Bar) -> &'a i32 {
//! let foo = &x.foo; // Lifetime L1
//! &foo.i // Lifetime L2
//! }
//!
//! Note that this comes up with `&` expressions, `ref`
//! bindings, and `autorefs`, which are the three ways to introduce
//! a borrow.
//!
//! The key point here is that when you are borrowing a value that
//! is "guaranteed" by a borrowed pointer, you must link the
//! lifetime of that borrowed pointer (`L1`, here) to the lifetime of
//! the borrow itself (`L2`). What do I mean by "guaranteed" by a
//! borrowed pointer? I mean any data that is reached by first
//! dereferencing a borrowed pointer and then either traversing
//! interior offsets or boxes. We say that the guarantor
//! of such data is the region of the borrowed pointer that was
//! traversed. This is essentially the same as the ownership
//! relation, except that a borrowed pointer never owns its
//! contents.
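//!
//! As a minimal illustration (a sketch, not taken from the compiler's test
//! suite), regionck links the lifetimes involved in a direct reborrow:
//!
//!     let mut n = 0;
//!     let b = &mut n;    // original borrow, lifetime L1
//!     let r = &mut *b;   // reborrow, lifetime L2; regionck requires L2 <= L1
//!     *r += 1;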
use crate::check::dropck;
use crate::check::FnCtxt;
use crate::middle::mem_categorization as mc;
use crate::middle::mem_categorization::Categorization;
use crate::middle::region;
use rustc::hir::def_id::DefId;
use rustc::infer::outlives::env::OutlivesEnvironment;
use rustc::infer::{self, RegionObligation, SuppressRegionErrors};
use rustc::ty::adjustment;
use rustc::ty::subst::Substs;
use rustc::ty::{self, Ty};
use rustc::hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc::hir::{self, PatKind};
use rustc_data_structures::sync::Lrc;
use std::mem;
use std::ops::Deref;
use std::rc::Rc;
use syntax::ast;
use syntax_pos::Span;
// a variation on `try!` that logs the error and just returns unit
macro_rules! ignore_err {
($e:expr) => {
match $e {
Ok(e) => e,
Err(_) => {
debug!("ignoring mem-categorization error!");
return ();
}
}
};
}
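// Usage sketch: wraps fallible mem-categorization results, e.g.
//     let cmt = ignore_err!(self.with_mc(|mc| mc.cat_expr(base)));
// (as in `link_addr_of` below); on `Err` the enclosing function returns early.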
///////////////////////////////////////////////////////////////////////////
// PUBLIC ENTRY POINTS
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
pub fn regionck_expr(&self, body: &'gcx hir::Body) {
let subject = self.tcx.hir().body_owner_def_id(body.id());
let id = body.value.id;
let mut rcx = RegionCtxt::new(
self,
RepeatingScope(id),
id,
Subject(subject),
self.param_env,
);
// There are no add'l implied bounds when checking a
// standalone expr (e.g., the `E` in a type like `[u32; E]`).
rcx.outlives_environment.save_implied_bounds(id);
if self.err_count_since_creation() == 0 {
// regionck assumes typeck succeeded
rcx.visit_body(body);
rcx.visit_region_obligations(id);
}
rcx.resolve_regions_and_report_errors(SuppressRegionErrors::when_nll_is_enabled(self.tcx));
assert!(self.tables.borrow().free_region_map.is_empty());
self.tables.borrow_mut().free_region_map = rcx.outlives_environment.into_free_region_map();
}
/// Region checking during the WF phase for items. `wf_tys` are the
/// types from which we should derive implied bounds, if any.
pub fn regionck_item(&self, item_id: ast::NodeId, span: Span, wf_tys: &[Ty<'tcx>]) {
debug!("regionck_item(item.id={:?}, wf_tys={:?})", item_id, wf_tys);
let subject = self.tcx.hir().local_def_id(item_id);
let mut rcx = RegionCtxt::new(
self,
RepeatingScope(item_id),
item_id,
Subject(subject),
self.param_env,
);
rcx.outlives_environment
.add_implied_bounds(self, wf_tys, item_id, span);
rcx.outlives_environment.save_implied_bounds(item_id);
rcx.visit_region_obligations(item_id);
rcx.resolve_regions_and_report_errors(SuppressRegionErrors::default());
}
/// Region check a function body. Not invoked on closures, but
/// only on the "root" fn item (in which closures may be
/// embedded). Walks the function body and adds various add'l
/// constraints that are needed for region inference. This is
/// separated both to isolate "pure" region constraints from the
/// rest of type check and because sometimes we need type
/// inference to have completed before we can determine which
/// constraints to add.
pub fn regionck_fn(&self, fn_id: ast::NodeId, body: &'gcx hir::Body) {
debug!("regionck_fn(id={})", fn_id);
let subject = self.tcx.hir().body_owner_def_id(body.id());
let node_id = body.value.id;
let mut rcx = RegionCtxt::new(
self,
RepeatingScope(node_id),
node_id,
Subject(subject),
self.param_env,
);
if self.err_count_since_creation() == 0 {
// regionck assumes typeck succeeded
rcx.visit_fn_body(fn_id, body, self.tcx.hir().span(fn_id));
}
rcx.resolve_regions_and_report_errors(SuppressRegionErrors::when_nll_is_enabled(self.tcx));
// In this mode, we also copy the free-region-map into the
// tables of the enclosing fcx. In the other regionck modes
// (e.g., `regionck_item`), we don't have an enclosing tables.
assert!(self.tables.borrow().free_region_map.is_empty());
self.tables.borrow_mut().free_region_map = rcx.outlives_environment.into_free_region_map();
}
}
///////////////////////////////////////////////////////////////////////////
// INTERNALS
pub struct RegionCtxt<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
pub fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
pub region_scope_tree: Lrc<region::ScopeTree>,
outlives_environment: OutlivesEnvironment<'tcx>,
// id of innermost fn body id
body_id: ast::NodeId,
// call_site scope of innermost fn
call_site_scope: Option<region::Scope>,
// id of innermost fn or loop
repeating_scope: ast::NodeId,
// id of AST node being analyzed (the subject of the analysis).
subject_def_id: DefId,
}
impl<'a, 'gcx, 'tcx> Deref for RegionCtxt<'a, 'gcx, 'tcx> {
type Target = FnCtxt<'a, 'gcx, 'tcx>;
fn deref(&self) -> &Self::Target {
&self.fcx
}
}
pub struct RepeatingScope(ast::NodeId);
pub struct Subject(DefId);
impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> {
pub fn new(
fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
RepeatingScope(initial_repeating_scope): RepeatingScope,
initial_body_id: ast::NodeId,
Subject(subject): Subject,
param_env: ty::ParamEnv<'tcx>,
) -> RegionCtxt<'a, 'gcx, 'tcx> {
let region_scope_tree = fcx.tcx.region_scope_tree(subject);
let outlives_environment = OutlivesEnvironment::new(param_env);
RegionCtxt {
fcx,
region_scope_tree,
repeating_scope: initial_repeating_scope,
body_id: initial_body_id,
call_site_scope: None,
subject_def_id: subject,
outlives_environment,
}
}
fn set_repeating_scope(&mut self, scope: ast::NodeId) -> ast::NodeId {
mem::replace(&mut self.repeating_scope, scope)
}
/// Try to resolve the type for the given node, returning `t_err` if an error results. Note that
/// we never care about the details of the error; the same error will be detected and reported
/// in the writeback phase.
///
/// Note one important point: we do not attempt to resolve *region variables* here. This is
/// because regionck is essentially adding constraints to those region variables and so may yet
/// influence how they are resolved.
///
/// Consider this silly example:
///
/// ```
/// fn borrow(x: &i32) -> &i32 {x}
/// fn foo(x: @i32) -> i32 { // block: B
/// let b = borrow(x); // region: <R0>
/// *b
/// }
/// ```
///
/// Here, the region of `b` will be `<R0>`. `<R0>` is constrained to be some subregion of the
/// block B and some superregion of the call. If we forced it now, we'd choose the smaller
/// region (the call). But that would make the *b illegal. Since we don't resolve, the type
/// of b will be `&<R0>.i32` and then `*b` will require that `<R0>` be bigger than the let and
/// the `*b` expression, so we will effectively resolve `<R0>` to be the block B.
pub fn resolve_type(&self, unresolved_ty: Ty<'tcx>) -> Ty<'tcx> {
self.resolve_type_vars_if_possible(&unresolved_ty)
}
/// Try to resolve the type for the given node.
fn resolve_node_type(&self, id: hir::HirId) -> Ty<'tcx> {
let t = self.node_ty(id);
self.resolve_type(t)
}
/// Try to resolve the type for the given node.
pub fn resolve_expr_type_adjusted(&mut self, expr: &hir::Expr) -> Ty<'tcx> {
let ty = self.tables.borrow().expr_ty_adjusted(expr);
self.resolve_type(ty)
}
/// This is the "main" function when region-checking a function item or a closure
/// within a function item. It begins by updating various fields (e.g., `call_site_scope`
/// and `outlives_environment`) to be appropriate to the function and then adds constraints
/// derived from the function body.
///
/// Note that it does **not** restore the state of the fields that
/// it updates! This is intentional, since -- for the main
/// function -- we wish to be able to read the final
/// `outlives_environment` and other fields from the caller. For
/// closures, however, we save and restore any "scoped state"
/// before we invoke this function. (See `visit_fn` in the
/// `intravisit::Visitor` impl below.)
fn visit_fn_body(
&mut self,
id: ast::NodeId, // the id of the fn itself
body: &'gcx hir::Body,
span: Span,
) {
// When we enter a function, we can derive
debug!("visit_fn_body(id={})", id);
let body_id = body.id();
self.body_id = body_id.node_id;
let call_site = region::Scope {
id: body.value.hir_id.local_id,
data: region::ScopeData::CallSite,
};
self.call_site_scope = Some(call_site);
let fn_sig = {
let fn_hir_id = self.tcx.hir().node_to_hir_id(id);
match self.tables.borrow().liberated_fn_sigs().get(fn_hir_id) {
Some(f) => f.clone(),
None => {
bug!("No fn-sig entry for id={}", id);
}
}
};
// Collect the types from which we create inferred bounds.
// For the return type, if diverging, substitute `bool` just
// because it will have no effect.
//
// FIXME(#27579) return types should not be implied bounds
let fn_sig_tys: Vec<_> = fn_sig
.inputs()
.iter()
.cloned()
.chain(Some(fn_sig.output()))
.collect();
self.outlives_environment.add_implied_bounds(
self.fcx,
&fn_sig_tys[..],
body_id.node_id,
span,
);
self.outlives_environment
.save_implied_bounds(body_id.node_id);
self.link_fn_args(
region::Scope {
id: body.value.hir_id.local_id,
data: region::ScopeData::Node,
},
&body.arguments,
);
self.visit_body(body);
self.visit_region_obligations(body_id.node_id);
let call_site_scope = self.call_site_scope.unwrap();
debug!(
"visit_fn_body body.id {:?} call_site_scope: {:?}",
body.id(),
call_site_scope
);
let call_site_region = self.tcx.mk_region(ty::ReScope(call_site_scope));
let body_hir_id = self.tcx.hir().node_to_hir_id(body_id.node_id);
self.type_of_node_must_outlive(infer::CallReturn(span), body_hir_id, call_site_region);
self.constrain_opaque_types(
&self.fcx.opaque_types.borrow(),
self.outlives_environment.free_region_map(),
);
}
fn visit_region_obligations(&mut self, node_id: ast::NodeId) {
debug!("visit_region_obligations: node_id={}", node_id);
// region checking can introduce new pending obligations
// which, when processed, might generate new region
// obligations. So make sure we process those.
self.select_all_obligations_or_error();
}
fn resolve_regions_and_report_errors(&self, suppress: SuppressRegionErrors) {
self.infcx.process_registered_region_obligations(
self.outlives_environment.region_bound_pairs_map(),
self.implicit_region_bound,
self.param_env,
);
self.fcx.resolve_regions_and_report_errors(
self.subject_def_id,
&self.region_scope_tree,
&self.outlives_environment,
suppress,
);
}
fn constrain_bindings_in_pat(&mut self, pat: &hir::Pat) {
debug!("regionck::visit_pat(pat={:?})", pat);
pat.each_binding(|_, hir_id, span, _| {
// If we have a variable that contains region'd data, that
// data will be accessible from anywhere that the variable is
// accessed. We must be wary of loops like this:
//
// // from src/test/compile-fail/borrowck-lend-flow.rs
// let mut v = box 3, w = box 4;
// let mut x = &mut w;
// loop {
// **x += 1; // (2)
// borrow(v); //~ ERROR cannot borrow
// x = &mut v; // (1)
// }
//
// Typically, we try to determine the region of a borrow from
// those points where it is dereferenced. In this case, one
// might imagine that the lifetime of `x` need only be the
// body of the loop. But of course this is incorrect because
// the pointer that is created at point (1) is consumed at
// point (2), meaning that it must be live across the loop
// iteration. The easiest way to guarantee this is to require
// that the lifetime of any regions that appear in a
// variable's type enclose at least the variable's scope.
let var_scope = self.region_scope_tree.var_scope(hir_id.local_id);
let var_region = self.tcx.mk_region(ty::ReScope(var_scope));
let origin = infer::BindingTypeIsNotValidAtDecl(span);
self.type_of_node_must_outlive(origin, hir_id, var_region);
let typ = self.resolve_node_type(hir_id);
let body_id = self.body_id;
let _ = dropck::check_safety_of_destructor_if_necessary(
self, typ, span, body_id, var_scope,
);
})
}
}
impl<'a, 'gcx, 'tcx> Visitor<'gcx> for RegionCtxt<'a, 'gcx, 'tcx> {
// (..) FIXME(#3238) should use visit_pat, not visit_arm/visit_local,
// However, right now we run into an issue whereby some free
// regions are not properly related if they appear within the
// types of arguments that must be inferred. This could be
// addressed by deferring the construction of the region
// hierarchy, and in particular the relationships between free
// regions, until regionck, as described in #3238.
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'gcx> {
NestedVisitorMap::None
}
fn visit_fn(
&mut self,
fk: intravisit::FnKind<'gcx>,
_: &'gcx hir::FnDecl,
body_id: hir::BodyId,
span: Span,
id: ast::NodeId,
) {
assert!(
match fk {
intravisit::FnKind::Closure(..) => true,
_ => false,
},
"visit_fn invoked for something other than a closure"
);
// Save state of current function before invoking
// `visit_fn_body`. We will restore afterwards.
let old_body_id = self.body_id;
let old_call_site_scope = self.call_site_scope;
let env_snapshot = self.outlives_environment.push_snapshot_pre_closure();
let body = self.tcx.hir().body(body_id);
self.visit_fn_body(id, body, span);
// Restore state from previous function.
self.outlives_environment
.pop_snapshot_post_closure(env_snapshot);
self.call_site_scope = old_call_site_scope;
self.body_id = old_body_id;
}
//visit_pat: visit_pat, // (..) see above
fn visit_arm(&mut self, arm: &'gcx hir::Arm) {
// see above
for p in &arm.pats {
self.constrain_bindings_in_pat(p);
}
intravisit::walk_arm(self, arm);
}
fn | (&mut self, l: &'gcx hir::Local) {
// see above
self.constrain_bindings_in_pat(&l.pat);
self.link_local(l);
intravisit::walk_local(self, l);
}
fn visit_expr(&mut self, expr: &'gcx hir::Expr) {
debug!(
"regionck::visit_expr(e={:?}, repeating_scope={})",
expr, self.repeating_scope
);
// No matter what, the type of each expression must outlive the
// scope of that expression. This also guarantees basic WF.
let expr_ty = self.resolve_node_type(expr.hir_id);
// the region corresponding to this expression
let expr_region = self.tcx.mk_region(ty::ReScope(region::Scope {
id: expr.hir_id.local_id,
data: region::ScopeData::Node,
}));
self.type_must_outlive(
infer::ExprTypeIsNotInScope(expr_ty, expr.span),
expr_ty,
expr_region,
);
let is_method_call = self.tables.borrow().is_method_call(expr);
// If we are calling a method (either explicitly or via an
// overloaded operator), check that all of the types provided as
// arguments for its type parameters are well-formed, and all the regions
// provided as arguments outlive the call.
if is_method_call {
let origin = match expr.node {
hir::ExprKind::MethodCall(..) => infer::ParameterOrigin::MethodCall,
hir::ExprKind::Unary(op, _) if op == hir::UnDeref => {
infer::ParameterOrigin::OverloadedDeref
}
_ => infer::ParameterOrigin::OverloadedOperator,
};
let substs = self.tables.borrow().node_substs(expr.hir_id);
self.substs_wf_in_scope(origin, substs, expr.span, expr_region);
// Arguments (sub-expressions) are checked via `constrain_call`, below.
}
// Check any autoderefs or autorefs that appear.
let cmt_result = self.constrain_adjustments(expr);
// If necessary, constrain destructors in this expression. This will be
// the adjusted form if there is an adjustment.
match cmt_result {
Ok(head_cmt) => {
self.check_safety_of_rvalue_destructor_if_necessary(&head_cmt, expr.span);
}
Err(..) => {
self.tcx.sess.delay_span_bug(expr.span, "cat_expr Errd");
}
}
debug!(
"regionck::visit_expr(e={:?}, repeating_scope={}) - visiting subexprs",
expr, self.repeating_scope
);
match expr.node {
hir::ExprKind::Path(_) => {
let substs = self.tables.borrow().node_substs(expr.hir_id);
let origin = infer::ParameterOrigin::Path;
self.substs_wf_in_scope(origin, substs, expr.span, expr_region);
}
hir::ExprKind::Call(ref callee, ref args) => {
if is_method_call {
self.constrain_call(expr, Some(&callee), args.iter().map(|e| &*e));
} else {
self.constrain_callee(&callee);
self.constrain_call(expr, None, args.iter().map(|e| &*e));
}
intravisit::walk_expr(self, expr);
}
hir::ExprKind::MethodCall(.., ref args) => {
self.constrain_call(expr, Some(&args[0]), args[1..].iter().map(|e| &*e));
intravisit::walk_expr(self, expr);
}
hir::ExprKind::AssignOp(_, ref lhs, ref rhs) => {
if is_method_call {
self.constrain_call(expr, Some(&lhs), Some(&**rhs).into_iter());
}
intravisit::walk_expr(self, expr);
}
hir::ExprKind::Index(ref lhs, ref rhs) if is_method_call => {
self.constrain_call(expr, Some(&lhs), Some(&**rhs).into_iter());
intravisit::walk_expr(self, expr);
}
hir::ExprKind::Binary(_, ref lhs, ref rhs) if is_method_call => {
// As `ExprKind::MethodCall`, but the call is via an overloaded op.
self.constrain_call(expr, Some(&lhs), Some(&**rhs).into_iter());
intravisit::walk_expr(self, expr);
}
hir::ExprKind::Binary(_, ref lhs, ref rhs) => {
// If you do `x OP y`, then the types of `x` and `y` must
// outlive the operation you are performing.
let lhs_ty = self.resolve_expr_type_adjusted(&lhs);
let rhs_ty = self.resolve_expr_type_adjusted(&rhs);
for &ty in &[lhs_ty, rhs_ty] {
self.type_must_outlive(infer::Operand(expr.span), ty, expr_region);
}
intravisit::walk_expr(self, expr);
}
hir::ExprKind::Unary(hir::UnDeref, ref base) => {
// For *a, the lifetime of a must enclose the deref
if is_method_call {
self.constrain_call(expr, Some(base), None::<hir::Expr>.iter());
}
// For overloaded derefs, base_ty is the input to `Deref::deref`,
// but it's a reference type using the same region as the output.
let base_ty = self.resolve_expr_type_adjusted(base);
if let ty::Ref(r_ptr, _, _) = base_ty.sty {
self.mk_subregion_due_to_dereference(expr.span, expr_region, r_ptr);
}
intravisit::walk_expr(self, expr);
}
hir::ExprKind::Unary(_, ref lhs) if is_method_call => {
// As above.
self.constrain_call(expr, Some(&lhs), None::<hir::Expr>.iter());
intravisit::walk_expr(self, expr);
}
hir::ExprKind::Index(ref vec_expr, _) => {
// For a[b], the lifetime of a must enclose the deref
let vec_type = self.resolve_expr_type_adjusted(&vec_expr);
self.constrain_index(expr, vec_type);
intravisit::walk_expr(self, expr);
}
hir::ExprKind::Cast(ref source, _) => {
// Determine if we are casting `source` to a trait
// instance. If so, we have to be sure that the type of
// the source obeys the trait's region bound.
self.constrain_cast(expr, &source);
intravisit::walk_expr(self, expr);
}
hir::ExprKind::AddrOf(m, ref base) => {
self.link_addr_of(expr, m, &base);
// Require that when you write a `&expr` expression, the
// resulting pointer has a lifetime that encompasses the
// `&expr` expression itself. Note that we are constraining
// the type of the node expr.id here *before applying
// adjustments*.
//
// FIXME(https://github.com/rust-lang/rfcs/issues/811)
// nested method calls requires that this rule change
let ty0 = self.resolve_node_type(expr.hir_id);
self.type_must_outlive(infer::AddrOf(expr.span), ty0, expr_region);
intravisit::walk_expr(self, expr);
}
hir::ExprKind::Match(ref discr, ref arms, _) => {
self.link_match(&discr, &arms[..]);
intravisit::walk_expr(self, expr);
}
hir::ExprKind::Closure(.., body_id, _, _) => {
self.check_expr_fn_block(expr, body_id);
}
hir::ExprKind::Loop(ref body, _, _) => {
let repeating_scope = self.set_repeating_scope(body.id);
intravisit::walk_expr(self, expr);
self.set_repeating_scope(repeating_scope);
}
hir::ExprKind::While(ref cond, ref body, _) => {
let repeating_scope = self.set_repeating_scope(cond.id);
self.visit_expr(&cond);
self.set_repeating_scope(body.id);
self.visit_block(&body);
self.set_repeating_scope(repeating_scope);
}
hir::ExprKind::Ret(Some(ref ret_expr)) => {
let call_site_scope = self.call_site_scope;
debug!(
"visit_expr ExprKind::Ret ret_expr.id {} call_site_scope: {:?}",
ret_expr.id, call_site_scope
);
let call_site_region = self.tcx.mk_region(ty::ReScope(call_site_scope.unwrap()));
self.type_of_node_must_outlive(
infer::CallReturn(ret_expr.span),
ret_expr.hir_id,
call_site_region,
);
intravisit::walk_expr(self, expr);
}
_ => {
intravisit::walk_expr(self, expr);
}
}
}
}
impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> {
fn constrain_cast(&mut self, cast_expr: &hir::Expr, source_expr: &hir::Expr) {
debug!(
"constrain_cast(cast_expr={:?}, source_expr={:?})",
cast_expr, source_expr
);
let source_ty = self.resolve_node_type(source_expr.hir_id);
let target_ty = self.resolve_node_type(cast_expr.hir_id);
self.walk_cast(cast_expr, source_ty, target_ty);
}
fn walk_cast(&mut self, cast_expr: &hir::Expr, from_ty: Ty<'tcx>, to_ty: Ty<'tcx>) {
debug!("walk_cast(from_ty={:?}, to_ty={:?})", from_ty, to_ty);
match (&from_ty.sty, &to_ty.sty) {
/*From:*/
(&ty::Ref(from_r, from_ty, _), /*To: */ &ty::Ref(to_r, to_ty, _)) => {
// Target cannot outlive source, naturally.
self.sub_regions(infer::Reborrow(cast_expr.span), to_r, from_r);
self.walk_cast(cast_expr, from_ty, to_ty);
}
/*From:*/
(_, /*To: */ &ty::Dynamic(.., r)) => {
// When T is existentially quantified as a trait
// `Foo+'to`, it must outlive the region bound `'to`.
self.type_must_outlive(infer::RelateObjectBound(cast_expr.span), from_ty, r);
}
/*From:*/
(&ty::Adt(from_def, _), /*To: */ &ty::Adt(to_def, _))
if from_def.is_box() && to_def.is_box() =>
{
self.walk_cast(cast_expr, from_ty.boxed_ty(), to_ty.boxed_ty());
}
_ => {}
}
}
fn check_expr_fn_block(&mut self, expr: &'gcx hir::Expr, body_id: hir::BodyId) {
let repeating_scope = self.set_repeating_scope(body_id.node_id);
intravisit::walk_expr(self, expr);
self.set_repeating_scope(repeating_scope);
}
fn constrain_callee(&mut self, callee_expr: &hir::Expr) {
let callee_ty = self.resolve_node_type(callee_expr.hir_id);
match callee_ty.sty {
ty::FnDef(..) | ty::FnPtr(_) => {}
_ => {
// this should not happen, but it does if the program is
// erroneous
//
// bug!(
// callee_expr.span,
// "Calling non-function: {}",
// callee_ty);
}
}
}
fn constrain_call<'b, I: Iterator<Item = &'b hir::Expr>>(
&mut self,
call_expr: &hir::Expr,
receiver: Option<&hir::Expr>,
arg_exprs: I,
) {
//! Invoked on every call site (i.e., normal calls, method calls,
//! and overloaded operators). Constrains the regions which appear
//! in the type of the function. Also constrains the regions that
//! appear in the arguments appropriately.
debug!(
"constrain_call(call_expr={:?}, receiver={:?})",
call_expr, receiver
);
// `callee_region` is the scope representing the time in which the
// call occurs.
//
// FIXME(#6268) to support nested method calls, should be callee_id
let callee_scope = region::Scope {
id: call_expr.hir_id.local_id,
data: region::ScopeData::Node,
};
let callee_region = self.tcx.mk_region(ty::ReScope(callee_scope));
debug!("callee_region={:?}", callee_region);
for arg_expr in arg_exprs {
debug!("Argument: {:?}", arg_expr);
// ensure that any regions appearing in the argument type are
// valid for at least the lifetime of the function:
self.type_of_node_must_outlive(
infer::CallArg(arg_expr.span),
arg_expr.hir_id,
callee_region,
);
}
// as loop above, but for receiver
if let Some(r) = receiver {
debug!("receiver: {:?}", r);
self.type_of_node_must_outlive(infer::CallRcvr(r.span), r.hir_id, callee_region);
}
}
/// Creates a temporary `MemCategorizationContext` and passes it to the closure.
fn with_mc<F, R>(&self, f: F) -> R
where
F: for<'b> FnOnce(mc::MemCategorizationContext<'b, 'gcx, 'tcx>) -> R,
{
f(mc::MemCategorizationContext::with_infer(
&self.infcx,
&self.region_scope_tree,
&self.tables.borrow(),
))
}
/// Invoked on any adjustments that occur. Checks that if this is a region pointer being
/// dereferenced, the lifetime of the pointer includes the deref expr.
fn constrain_adjustments(&mut self, expr: &hir::Expr) -> mc::McResult<mc::cmt_<'tcx>> {
debug!("constrain_adjustments(expr={:?})", expr);
let mut cmt = self.with_mc(|mc| mc.cat_expr_unadjusted(expr))?;
let tables = self.tables.borrow();
let adjustments = tables.expr_adjustments(&expr);
if adjustments.is_empty() {
return Ok(cmt);
}
debug!("constrain_adjustments: adjustments={:?}", adjustments);
// If necessary, constrain destructors in the unadjusted form of this
// expression.
self.check_safety_of_rvalue_destructor_if_necessary(&cmt, expr.span);
let expr_region = self.tcx.mk_region(ty::ReScope(region::Scope {
id: expr.hir_id.local_id,
data: region::ScopeData::Node,
}));
for adjustment in adjustments {
debug!(
"constrain_adjustments: adjustment={:?}, cmt={:?}",
adjustment, cmt
);
if let adjustment::Adjust::Deref(Some(deref)) = adjustment.kind {
debug!("constrain_adjustments: overloaded deref: {:?}", deref);
// Treat overloaded autoderefs as if an AutoBorrow adjustment
// was applied on the base type, as that is always the case.
let input = self.tcx.mk_ref(
deref.region,
ty::TypeAndMut {
ty: cmt.ty,
mutbl: deref.mutbl,
},
);
let output = self.tcx.mk_ref(
deref.region,
ty::TypeAndMut {
ty: adjustment.target,
mutbl: deref.mutbl,
},
);
self.link_region(
expr.span,
deref.region,
ty::BorrowKind::from_mutbl(deref.mutbl),
&cmt,
);
// Specialized version of constrain_call.
self.type_must_outlive(infer::CallRcvr(expr.span), input, expr_region);
self.type_must_outlive(infer::CallReturn(expr.span), output, expr_region);
}
if let adjustment::Adjust::Borrow(ref autoref) = adjustment.kind {
self.link_autoref(expr, &cmt, autoref);
// Require that the resulting region encompasses
// the current node.
//
// FIXME(#6268) remove to support nested method calls
self.type_of_node_must_outlive(
infer::AutoBorrow(expr.span),
expr.hir_id,
expr_region,
);
}
cmt = self.with_mc(|mc| mc.cat_expr_adjusted(expr, cmt, &adjustment))?;
if let Categorization::Deref(_, mc::BorrowedPtr(_, r_ptr)) = cmt.cat {
self.mk_subregion_due_to_dereference(expr.span, expr_region, r_ptr);
}
}
Ok(cmt)
}
pub fn mk_subregion_due_to_dereference(
&mut self,
deref_span: Span,
minimum_lifetime: ty::Region<'tcx>,
maximum_lifetime: ty::Region<'tcx>,
) {
self.sub_regions(
infer::DerefPointer(deref_span),
minimum_lifetime,
maximum_lifetime,
)
}
fn check_safety_of_rvalue_destructor_if_necessary(&mut self, cmt: &mc::cmt_<'tcx>, span: Span) {
if let Categorization::Rvalue(region) = cmt.cat {
match *region {
ty::ReScope(rvalue_scope) => {
let typ = self.resolve_type(cmt.ty);
let body_id = self.body_id;
let _ = dropck::check_safety_of_destructor_if_necessary(
self,
typ,
span,
body_id,
rvalue_scope,
);
}
ty::ReStatic => {}
_ => {
span_bug!(
span,
"unexpected rvalue region in rvalue \
destructor safety checking: `{:?}`",
region
);
}
}
}
}
/// Invoked on any index expression that occurs. Checks that if this is a slice
/// being indexed, the lifetime of the pointer includes the deref expr.
fn constrain_index(&mut self, index_expr: &hir::Expr, indexed_ty: Ty<'tcx>) {
debug!(
"constrain_index(index_expr=?, indexed_ty={}",
self.ty_to_string(indexed_ty)
);
let r_index_expr = ty::ReScope(region::Scope {
id: index_expr.hir_id.local_id,
data: region::ScopeData::Node,
});
if let ty::Ref(r_ptr, r_ty, _) = indexed_ty.sty {
match r_ty.sty {
ty::Slice(_) | ty::Str => {
self.sub_regions(
infer::IndexSlice(index_expr.span),
self.tcx.mk_region(r_index_expr),
r_ptr,
);
}
_ => {}
}
}
}
/// Guarantees that any lifetimes which appear in the type of the node `id` (after applying
/// adjustments) are valid for at least `minimum_lifetime`
fn type_of_node_must_outlive(
&mut self,
origin: infer::SubregionOrigin<'tcx>,
hir_id: hir::HirId,
minimum_lifetime: ty::Region<'tcx>,
) {
// Try to resolve the type. If we encounter an error, then typeck
// is going to fail anyway, so just stop here and let typeck
// report errors later on in the writeback phase.
let ty0 = self.resolve_node_type(hir_id);
let ty = self.tables
.borrow()
.adjustments()
.get(hir_id)
.and_then(|adj| adj.last())
.map_or(ty0, |adj| adj.target);
let ty = self.resolve_type(ty);
debug!(
"constrain_regions_in_type_of_node(\
ty={}, ty0={}, id={:?}, minimum_lifetime={:?})",
ty, ty0, hir_id, minimum_lifetime
);
self.type_must_outlive(origin, ty, minimum_lifetime);
}
/// Adds constraints to inference such that `T: 'a` holds (or
/// reports an error if it cannot).
///
/// # Parameters
///
/// - `origin`, the reason we need this constraint
/// - `ty`, the type `T`
/// - `region`, the region `'a`
pub fn type_must_outlive(
&self,
origin: infer::SubregionOrigin<'tcx>,
ty: Ty<'tcx>,
region: ty::Region<'tcx>,
) {
self.infcx.register_region_obligation(
self.body_id,
RegionObligation {
sub_region: region,
sup_type: ty,
origin,
},
);
}
/// Computes the guarantor for an expression `&base` and then ensures that the lifetime of the
/// resulting pointer is linked to the lifetime of its guarantor (if any).
fn link_addr_of(&mut self, expr: &hir::Expr, mutability: hir::Mutability, base: &hir::Expr) {
debug!("link_addr_of(expr={:?}, base={:?})", expr, base);
let cmt = ignore_err!(self.with_mc(|mc| mc.cat_expr(base)));
debug!("link_addr_of: cmt={:?}", cmt);
self.link_region_from_node_type(expr.span, expr.hir_id, mutability, &cmt);
}
/// Computes the guarantors for any ref bindings in a `let` and
/// then ensures that the lifetime of the resulting pointer is
/// linked to the lifetime of the initialization expression.
fn link_local(&self, local: &hir::Local) {
debug!("regionck::for_local()");
let init_expr = match local.init {
None => {
return;
}
Some(ref expr) => &**expr,
};
let discr_cmt = Rc::new(ignore_err!(self.with_mc(|mc| mc.cat_expr(init_expr))));
self.link_pattern(discr_cmt, &local.pat);
}
/// Computes the guarantors for any ref bindings in a match and
/// then ensures that the lifetime of the resulting pointer is
/// linked to the lifetime of its guarantor (if any).
fn link_match(&self, discr: &hir::Expr, arms: &[hir::Arm]) {
debug!("regionck::for_match()");
let discr_cmt = Rc::new(ignore_err!(self.with_mc(|mc| mc.cat_expr(discr))));
debug!("discr_cmt={:?}", discr_cmt);
for arm in arms {
for root_pat in &arm.pats {
self.link_pattern(discr_cmt.clone(), &root_pat);
}
}
}
/// Computes the guarantors for any ref bindings in a match and
/// then ensures that the lifetime of the resulting pointer is
/// linked to the lifetime of its guarantor (if any).
fn link_fn_args(&self, body_scope: region::Scope, args: &[hir::Arg]) {
debug!("regionck::link_fn_args(body_scope={:?})", body_scope);
for arg in args {
let arg_ty = self.node_ty(arg.hir_id);
let re_scope = self.tcx.mk_region(ty::ReScope(body_scope));
let arg_cmt = self.with_mc(|mc| {
Rc::new(mc.cat_rvalue(arg.hir_id, arg.pat.span, re_scope, arg_ty))
});
debug!("arg_ty={:?} arg_cmt={:?} arg={:?}", arg_ty, arg_cmt, arg);
self.link_pattern(arg_cmt, &arg.pat);
}
}
/// Link lifetimes of any ref bindings in `root_pat` to the pointers found
/// in the discriminant, if needed.
fn link_pattern(&self, discr_cmt: mc::cmt<'tcx>, root_pat: &hir::Pat) {
debug!(
"link_pattern(discr_cmt={:?}, root_pat={:?})",
discr_cmt, root_pat
);
ignore_err!(self.with_mc(|mc| {
mc.cat_pattern(discr_cmt, root_pat, |sub_cmt, sub_pat| {
// `ref x` pattern
if let PatKind::Binding(..) = sub_pat.node {
if let Some(&bm) = mc.tables.pat_binding_modes().get(sub_pat.hir_id) {
if let ty::BindByReference(mutbl) = bm {
self.link_region_from_node_type(
sub_pat.span,
sub_pat.hir_id,
mutbl,
&sub_cmt,
);
}
} else {
self.tcx
.sess
.delay_span_bug(sub_pat.span, "missing binding mode");
}
}
})
}));
}
/// Link lifetime of borrowed pointer resulting from autoref to lifetimes in the value being
/// autoref'd.
fn link_autoref(
&self,
expr: &hir::Expr,
expr_cmt: &mc::cmt_<'tcx>,
autoref: &adjustment::AutoBorrow<'tcx>,
) {
debug!(
"link_autoref(autoref={:?}, expr_cmt={:?})",
autoref, expr_cmt
);
match *autoref {
adjustment::AutoBorrow::Ref(r, m) => {
self.link_region(expr.span, r, ty::BorrowKind::from_mutbl(m.into()), expr_cmt);
}
adjustment::AutoBorrow::RawPtr(m) => {
let r = self.tcx.mk_region(ty::ReScope(region::Scope {
id: expr.hir_id.local_id,
data: region::ScopeData::Node,
}));
self.link_region(expr.span, r, ty::BorrowKind::from_mutbl(m), expr_cmt);
}
}
}
/// Like `link_region()`, except that the region is extracted from the type of `id`,
/// which must be some reference (`&T`, `&str`, etc).
fn link_region_from_node_type(
&self,
span: Span,
id: hir::HirId,
mutbl: hir::Mutability,
cmt_borrowed: &mc::cmt_<'tcx>,
) {
debug!(
"link_region_from_node_type(id={:?}, mutbl={:?}, cmt_borrowed={:?})",
id, mutbl, cmt_borrowed
);
let rptr_ty = self.resolve_node_type(id);
if let ty::Ref(r, _, _) = rptr_ty.sty {
debug!("rptr_ty={}", rptr_ty);
self.link_region(span, r, ty::BorrowKind::from_mutbl(mutbl), cmt_borrowed);
}
}
/// Informs the inference engine that `borrow_cmt` is being borrowed with
/// kind `borrow_kind` and lifetime `borrow_region`.
/// In order to ensure borrowck is satisfied, this may create constraints
/// between regions, as explained in `link_reborrowed_region()`.
fn link_region(
&self,
span: Span,
borrow_region: ty::Region<'tcx>,
borrow_kind: ty::BorrowKind,
borrow_cmt: &mc::cmt_<'tcx>,
) {
let origin = infer::DataBorrowed(borrow_cmt.ty, span);
self.type_must_outlive(origin, borrow_cmt.ty, borrow_region);
let mut borrow_kind = borrow_kind;
let mut borrow_cmt_cat = borrow_cmt.cat.clone();
loop {
debug!(
"link_region(borrow_region={:?}, borrow_kind={:?}, borrow_cmt={:?})",
borrow_region, borrow_kind, borrow_cmt
);
match borrow_cmt_cat {
Categorization::Deref(ref_cmt, mc::BorrowedPtr(ref_kind, ref_region)) => {
match self.link_reborrowed_region(
span,
borrow_region,
borrow_kind,
ref_cmt,
ref_region,
ref_kind,
borrow_cmt.note,
) {
Some((c, k)) => {
borrow_cmt_cat = c.cat.clone();
borrow_kind = k;
}
None => {
return;
}
}
}
Categorization::Downcast(cmt_base, _)
| Categorization::Deref(cmt_base, mc::Unique)
| Categorization::Interior(cmt_base, _) => {
// Borrowing interior or owned data requires the base
// to be valid and borrowable in the same fashion.
borrow_cmt_cat = cmt_base.cat.clone();
// (borrow_kind is left unchanged.)
}
Categorization::Deref(_, mc::UnsafePtr(..))
| Categorization::StaticItem
| Categorization::Upvar(..)
| Categorization::Local(..)
| Categorization::ThreadLocal(..)
| Categorization::Rvalue(..) => {
// These are all "base cases" with independent lifetimes
// that are not subject to inference
return;
}
}
}
}
/// This is the most complicated case: the path being borrowed is
/// itself the referent of a borrowed pointer. Let me give an
/// example fragment of code to make clear(er) the situation:
///
/// let r: &'a mut T = ...; // the original reference "r" has lifetime 'a
/// ...
/// &'z *r // the reborrow has lifetime 'z
///
/// Now, in this case, our primary job is to add the inference
/// constraint that `'z <= 'a`. Given this setup, let's clarify the
/// parameters in (roughly) terms of the example:
///
/// ```plain,ignore (pseudo-Rust)
/// A borrow of: `& 'z bk * r` where `r` has type `& 'a bk T`
/// borrow_region ^~ ref_region ^~
/// borrow_kind ^~ ref_kind ^~
/// ref_cmt ^
/// ```
///
/// Here `bk` stands for some borrow-kind (e.g., `mut`, `uniq`, etc).
///
/// Unfortunately, there are some complications beyond the simple
/// scenario I just painted:
///
/// 1. The reference `r` might in fact be a "by-ref" upvar. In that
/// case, we have two jobs. First, we are inferring whether this reference
/// should be an `&T`, `&mut T`, or `&uniq T` reference, and we must
/// adjust that based on this borrow (e.g., if this is an `&mut` borrow,
/// then `r` must be an `&mut` reference). Second, whenever we link
/// two regions (here, `'z <= 'a`), we supply a *cause*, and in this
/// case we adjust the cause to indicate that the reference being
/// "reborrowed" is itself an upvar. This provides a nicer error message
/// should something go wrong.
///
/// 2. There may in fact be more levels of reborrowing. In the
/// example, I said the borrow was like `&'z *r`, but it might
/// in fact be a borrow like `&'z **q` where `q` has type `&'a
/// &'b mut T`. In that case, we want to ensure that `'z <= 'a`
/// and `'z <= 'b`. This is explained more below.
///
/// The return value of this function indicates whether we need to
/// recurse and process `ref_cmt` (see case 2 above).
fn link_reborrowed_region(
&self,
span: Span,
borrow_region: ty::Region<'tcx>,
borrow_kind: ty::BorrowKind,
ref_cmt: mc::cmt<'tcx>,
ref_region: ty::Region<'tcx>,
mut ref_kind: ty::BorrowKind,
note: mc::Note,
) -> Option<(mc::cmt<'tcx>, ty::BorrowKind)> {
// Possible upvar ID we may need later to create an entry in the
// maybe link map.
// Detect by-ref upvar `x`:
let cause = match note {
mc::NoteUpvarRef(ref upvar_id) => {
match self.tables.borrow().upvar_capture_map.get(upvar_id) {
Some(&ty::UpvarCapture::ByRef(ref upvar_borrow)) => {
// The mutability of the upvar may have been modified
// by the above adjustment, so update our local variable.
ref_kind = upvar_borrow.kind;
infer::ReborrowUpvar(span, *upvar_id)
}
_ => {
span_bug!(span, "Illegal upvar id: {:?}", upvar_id);
}
}
}
mc::NoteClosureEnv(ref upvar_id) => {
// We don't have any mutability changes to propagate, but
// we do want to note that an upvar reborrow caused this
// link
infer::ReborrowUpvar(span, *upvar_id)
}
_ => infer::Reborrow(span),
};
debug!(
"link_reborrowed_region: {:?} <= {:?}",
borrow_region, ref_region
);
self.sub_regions(cause, borrow_region, ref_region);
// If we end up needing to recurse and establish a region link
// with `ref_cmt`, calculate what borrow kind we will end up
// needing. This will be used below.
//
// One interesting twist is that we can weaken the borrow kind
// when we recurse: to reborrow an `&mut` referent as mutable,
// borrowck requires a unique path to the `&mut` reference but not
// necessarily a *mutable* path.
let new_borrow_kind = match borrow_kind {
ty::ImmBorrow => ty::ImmBorrow,
ty::MutBorrow | ty::UniqueImmBorrow => ty::UniqueImmBorrow,
};
// Decide whether we need to recurse and link any regions within
// the `ref_cmt`. This concerns the case where the value
// being reborrowed is in fact a borrowed pointer found within
// another borrowed pointer. For example:
//
// let p: &'b &'a mut T = ...;
// ...
// &'z **p
//
// What makes this case particularly tricky is that, if the data
// being borrowed is a `&mut` or `&uniq` borrow, borrowck requires
// not only that `'z <= 'a`, (as before) but also `'z <= 'b`
// (otherwise the user might mutate through the `&mut T` reference
// after `'b` expires and invalidate the borrow we are looking at
// now).
//
// So let's re-examine our parameters in light of this more
// complicated (possible) scenario:
//
// A borrow of: `& 'z bk * * p` where `p` has type `&'b bk & 'a bk T`
// borrow_region ^~ ref_region ^~
// borrow_kind ^~ ref_kind ^~
// ref_cmt ^~~
//
// (Note that since we have not examined `ref_cmt.cat`, we don't
// know whether this scenario has occurred; but I wanted to show
// how all the types get adjusted.)
match ref_kind {
ty::ImmBorrow => {
// The reference being reborrowed is a shareable ref of
// type `&'a T`. In this case, it doesn't matter where we
// *found* the `&T` pointer, the memory it references will
// be valid and immutable for `'a`. So we can stop here.
//
// (Note that the `borrow_kind` must also be ImmBorrow or
// else the user is borrowing imm memory as mut memory,
// which means they'll get an error downstream in borrowck
// anyhow.)
return None;
}
ty::MutBorrow | ty::UniqueImmBorrow => {
// The reference being reborrowed is either an `&mut T` or
// `&uniq T`. This is the case where recursion is needed.
return Some((ref_cmt, new_borrow_kind));
}
}
}
/// Checks that the values provided for type/region arguments in a given
/// expression are well-formed and in-scope.
fn substs_wf_in_scope(
&mut self,
origin: infer::ParameterOrigin,
substs: &Substs<'tcx>,
expr_span: Span,
expr_region: ty::Region<'tcx>,
) {
debug!(
"substs_wf_in_scope(substs={:?}, \
expr_region={:?}, \
origin={:?}, \
expr_span={:?})",
substs, expr_region, origin, expr_span
);
let origin = infer::ParameterInScope(origin, expr_span);
for region in substs.regions() {
self.sub_regions(origin.clone(), expr_region, region);
}
for ty in substs.types() {
let ty = self.resolve_type(ty);
self.type_must_outlive(origin.clone(), ty, expr_region);
}
}
}
| visit_local |
show.rs | use winit::dpi::PhysicalSize;
use winit::event::{ElementState, Event, KeyboardInput, VirtualKeyCode, WindowEvent};
use winit::event_loop::{ControlFlow, EventLoop};
use winit::window::Window;
// For create_buffer_init()
use wgpu::util::DeviceExt;
use futures::executor::block_on;
use std::ops::Rem;
use crate::commands::{AntiAliasing, Background, RenderCmd, TessellateCmd, Tessellator};
use lyon::algorithms::aabb::bounding_box;
use lyon::algorithms::hatching::*;
use lyon::geom::LineSegment;
use lyon::math::*;
use lyon::path::Path;
use lyon::tess2;
use lyon::tessellation;
use lyon::tessellation::geometry_builder::*;
use lyon::tessellation::{FillOptions, FillTessellator, StrokeTessellator};
const PRIM_BUFFER_LEN: usize = 64;
#[repr(C)]
#[derive(Copy, Clone)]
struct Globals {
resolution: [f32; 2],
scroll_offset: [f32; 2],
bg_color: [f32; 4],
vignette_color: [f32; 4],
zoom: f32,
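// Padding (an assumption about the shader's expectations): rounds the
// struct up to 64 bytes so the uniform buffer layout stays 16-byte aligned.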
_pad: [f32; 3],
}
unsafe impl bytemuck::Pod for Globals {}
unsafe impl bytemuck::Zeroable for Globals {}
#[repr(C)]
#[derive(Copy, Clone)]
struct GpuVertex {
position: [f32; 2],
normal: [f32; 2],
prim_id: u32,
}
unsafe impl bytemuck::Pod for GpuVertex {}
unsafe impl bytemuck::Zeroable for GpuVertex {}
#[repr(C)]
#[derive(Copy, Clone)]
struct Primitive {
color: [f32; 4],
translate: [f32; 2],
z_index: i32,
width: f32,
}
unsafe impl bytemuck::Pod for Primitive {}
unsafe impl bytemuck::Zeroable for Primitive {}
#[repr(C)]
#[derive(Copy, Clone)]
struct BgVertex {
point: [f32; 2],
}
unsafe impl bytemuck::Pod for BgVertex {}
unsafe impl bytemuck::Zeroable for BgVertex {}
const DEFAULT_WINDOW_WIDTH: f32 = 800.0;
const DEFAULT_WINDOW_HEIGHT: f32 = 800.0;
/// Creates a texture view that uses MSAA and matches a given surface configuration
fn create_multisampled_framebuffer(
device: &wgpu::Device,
desc: &wgpu::SurfaceConfiguration,
sample_count: u32,
) -> wgpu::TextureView {
let multisampled_frame_descriptor = &wgpu::TextureDescriptor {
label: Some("Multisampled frame descriptor"),
size: wgpu::Extent3d {
width: desc.width,
height: desc.height,
depth_or_array_layers: 1,
},
mip_level_count: 1,
sample_count,
dimension: wgpu::TextureDimension::D2,
format: desc.format,
usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
};
device
.create_texture(multisampled_frame_descriptor)
.create_view(&wgpu::TextureViewDescriptor::default())
}
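// A minimal usage sketch (assumed names, not part of this file's event loop):
// recreate the MSAA view whenever the surface is reconfigured, then use it as
// the color attachment and resolve into the frame's swap-chain view:
//
//     let msaa_view = create_multisampled_framebuffer(&device, &surface_desc, sample_count);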
pub fn show_path(cmd: TessellateCmd, render_options: RenderCmd) {
let mut geometry: VertexBuffers<GpuVertex, u32> = VertexBuffers::new();
let fill_prim_id = 0;
let stroke_prim_id = 1;
let mut fill = FillTessellator::new();
let mut stroke = StrokeTessellator::new();
if let Some(options) = cmd.fill {
match cmd.tessellator {
Tessellator::Default => {
fill.tessellate(
&cmd.path,
&options,
&mut BuffersBuilder::new(&mut geometry, WithId(fill_prim_id)),
)
.unwrap();
//for (i, v) in geometry.vertices.iter().enumerate() {
// println!("{}: {:?}", i, v.position);
//}
//for i in 0..(geometry.indices.len() / 3) {
// println!(
// "{}/{}/{}",
// geometry.indices[i * 3],
// geometry.indices[i * 3 + 1],
// geometry.indices[i * 3 + 2],
// );
//}
}
Tessellator::Tess2 => {
tess2::FillTessellator::new()
.tessellate_path(
&cmd.path,
&options,
&mut tess2::geometry_builder::BuffersBuilder::new(&mut geometry, WithId(0)),
)
.unwrap();
}
}
}
if let Some(options) = cmd.stroke {
stroke
.tessellate_path(
&cmd.path,
&options,
&mut BuffersBuilder::new(&mut geometry, WithId(stroke_prim_id)),
)
.unwrap();
}
if let Some(hatch) = cmd.hatch {
let mut path = Path::builder();
let mut hatcher = Hatcher::new();
hatcher.hatch_path(
cmd.path.iter(),
&hatch.options,
&mut RegularHatchingPattern {
interval: hatch.spacing,
callback: &mut |segment: &HatchSegment| {
path.add_line_segment(&LineSegment {
from: segment.a.position,
to: segment.b.position,
});
},
},
);
let hatched_path = path.build();
stroke
.tessellate(
hatched_path.iter(),
&hatch.stroke,
&mut BuffersBuilder::new(&mut geometry, WithId(stroke_prim_id)),
)
.unwrap();
}
if let Some(dots) = cmd.dots {
let mut path = Path::builder();
let mut hatcher = Hatcher::new();
hatcher.dot_path(
cmd.path.iter(),
&dots.options,
&mut RegularDotPattern {
row_interval: dots.spacing,
column_interval: dots.spacing,
callback: &mut |dot: &Dot| {
path.add_point(dot.position);
},
},
);
let dotted_path = path.build();
stroke
.tessellate(
dotted_path.iter(),
&dots.stroke,
&mut BuffersBuilder::new(&mut geometry, WithId(stroke_prim_id)),
)
.unwrap();
}
let (bg_color, vignette_color) = match render_options.background {
Background::Blue => ([0.0, 0.47, 0.9, 1.0], [0.0, 0.1, 0.64, 1.0]),
Background::Clear => ([0.9, 0.9, 0.9, 1.0], [0.5, 0.5, 0.5, 1.0]),
Background::Dark => ([0.05, 0.05, 0.05, 1.0], [0.0, 0.0, 0.0, 1.0]),
};
if geometry.vertices.is_empty() {
println!("No geometry to show");
return;
}
let mut bg_geometry: VertexBuffers<BgVertex, u32> = VertexBuffers::new();
fill.tessellate_rectangle(
&Box2D { min: point(-1.0, -1.0), max: point(1.0, 1.0) },
&FillOptions::DEFAULT,
&mut BuffersBuilder::new(&mut bg_geometry, BgVertexCtor),
)
.unwrap();
let sample_count = match render_options.aa {
AntiAliasing::Msaa(samples) => samples as u32,
_ => 1,
};
let num_instances: u32 = PRIM_BUFFER_LEN as u32 - 1;
let mut cpu_primitives = Vec::with_capacity(PRIM_BUFFER_LEN);
for _ in 0..PRIM_BUFFER_LEN {
cpu_primitives.push(Primitive {
color: [1.0, 0.0, 0.0, 1.0],
z_index: 0,
width: 0.0,
translate: [0.0, 0.0],
});
}
// Stroke primitive
cpu_primitives[stroke_prim_id] = Primitive {
color: [0.0, 0.0, 0.0, 1.0],
z_index: num_instances as i32 + 2,
width: 1.0,
translate: [0.0, 0.0],
};
// Main fill primitive
cpu_primitives[fill_prim_id] = Primitive {
color: [1.0, 1.0, 1.0, 1.0],
z_index: num_instances as i32 + 1,
width: 0.0,
translate: [0.0, 0.0],
};
// Instance primitives
for (idx, cpu_prim) in cpu_primitives
.iter_mut()
.enumerate()
.skip(stroke_prim_id + 1)
.take(num_instances as usize - 1)
{
cpu_prim.z_index = (idx as u32 + 1) as i32;
cpu_prim.color = [
(0.1 * idx as f32).rem(1.0),
(0.5 * idx as f32).rem(1.0),
(0.9 * idx as f32).rem(1.0),
1.0,
];
}
let aabb = bounding_box(cmd.path.iter());
let center = aabb.center().to_vector();
let mut scene = SceneParams {
target_zoom: 5.0,
zoom: 5.0,
target_scroll: center,
scroll: center,
show_points: false,
show_wireframe: false,
stroke_width: 1.0,
target_stroke_width: 1.0,
draw_background: true,
cursor_position: (0.0, 0.0),
window_size: PhysicalSize::new(DEFAULT_WINDOW_WIDTH as u32, DEFAULT_WINDOW_HEIGHT as u32),
size_changed: true,
};
let event_loop = EventLoop::new();
let window = Window::new(&event_loop).unwrap();
// create an instance
let instance = wgpu::Instance::new(wgpu::Backends::PRIMARY);
// create a surface
let surface = unsafe { instance.create_surface(&window) };
// create an adapter
let adapter = block_on(instance.request_adapter(&wgpu::RequestAdapterOptions {
power_preference: wgpu::PowerPreference::LowPower,
compatible_surface: Some(&surface),
force_fallback_adapter: false,
}))
.unwrap();
// create a device and a queue
let (device, queue) = block_on(adapter.request_device(
&wgpu::DeviceDescriptor {
label: None,
features: wgpu::Features::default(),
limits: wgpu::Limits::default(),
},
None,
))
.unwrap();
let vbo = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: None,
contents: bytemuck::cast_slice(&geometry.vertices),
usage: wgpu::BufferUsages::VERTEX,
});
let ibo = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: None,
contents: bytemuck::cast_slice(&geometry.indices),
usage: wgpu::BufferUsages::INDEX,
});
let bg_vbo = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: None,
contents: bytemuck::cast_slice(&bg_geometry.vertices),
usage: wgpu::BufferUsages::VERTEX,
});
let bg_ibo = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: None,
contents: bytemuck::cast_slice(&bg_geometry.indices),
usage: wgpu::BufferUsages::INDEX,
});
let prim_buffer_byte_size = (PRIM_BUFFER_LEN * std::mem::size_of::<Primitive>()) as u64;
let globals_buffer_byte_size = std::mem::size_of::<Globals>() as u64;
let prims_ubo = device.create_buffer(&wgpu::BufferDescriptor {
label: Some("Prims ubo"),
size: prim_buffer_byte_size,
usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
mapped_at_creation: false,
});
let globals_ubo = device.create_buffer(&wgpu::BufferDescriptor {
label: Some("Globals ubo"),
size: globals_buffer_byte_size,
usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
mapped_at_creation: false,
});
let vs_module = &device.create_shader_module(&wgpu::ShaderModuleDescriptor {
label: Some("Geometry vs"),
source: wgpu::ShaderSource::Wgsl(include_str!("./../shaders/geometry.vs.wgsl").into()),
});
let fs_module = &device.create_shader_module(&wgpu::ShaderModuleDescriptor {
label: Some("Geometry fs"),
source: wgpu::ShaderSource::Wgsl(include_str!("./../shaders/geometry.fs.wgsl").into()),
});
let bg_vs_module = &device.create_shader_module(&wgpu::ShaderModuleDescriptor {
label: Some("Background vs"),
source: wgpu::ShaderSource::Wgsl(include_str!("./../shaders/background.vs.wgsl").into()),
});
let bg_fs_module = &device.create_shader_module(&wgpu::ShaderModuleDescriptor {
label: Some("Background fs"),
source: wgpu::ShaderSource::Wgsl(include_str!("./../shaders/background.fs.wgsl").into()),
});
let bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
label: Some("Bind group layout"),
entries: &[
wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: wgpu::BufferSize::new(globals_buffer_byte_size),
},
count: None,
},
wgpu::BindGroupLayoutEntry {
binding: 1,
visibility: wgpu::ShaderStages::VERTEX,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: wgpu::BufferSize::new(prim_buffer_byte_size),
},
count: None,
},
],
});
let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("Bind group"),
layout: &bind_group_layout,
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::Buffer(globals_ubo.as_entire_buffer_binding()),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::Buffer(prims_ubo.as_entire_buffer_binding()),
},
],
});
let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
bind_group_layouts: &[&bind_group_layout],
push_constant_ranges: &[],
label: None,
});
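    // The depth buffer is cleared to 0.0 and tested with Greater (a reversed
    // depth convention), so fragments with larger depth values, presumably
    // derived from z_index in the vertex shader, win.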
let depth_stencil_state = Some(wgpu::DepthStencilState {
format: wgpu::TextureFormat::Depth32Float,
depth_write_enabled: true,
depth_compare: wgpu::CompareFunction::Greater,
stencil: wgpu::StencilState {
front: wgpu::StencilFaceState::IGNORE,
back: wgpu::StencilFaceState::IGNORE,
read_mask: 0,
write_mask: 0,
},
bias: wgpu::DepthBiasState::default(),
});
let mut render_pipeline_descriptor = wgpu::RenderPipelineDescriptor {
label: None,
layout: Some(&pipeline_layout),
vertex: wgpu::VertexState {
module: &vs_module,
entry_point: "main",
buffers: &[wgpu::VertexBufferLayout {
array_stride: std::mem::size_of::<GpuVertex>() as u64,
step_mode: wgpu::VertexStepMode::Vertex,
attributes: &[
wgpu::VertexAttribute {
offset: 0,
format: wgpu::VertexFormat::Float32x2,
shader_location: 0,
},
wgpu::VertexAttribute {
offset: 8,
format: wgpu::VertexFormat::Float32x2,
shader_location: 1,
},
wgpu::VertexAttribute {
offset: 16,
format: wgpu::VertexFormat::Uint32,
shader_location: 2,
},
],
}],
},
fragment: Some(wgpu::FragmentState {
module: &fs_module,
entry_point: "main",
targets: &[
wgpu::ColorTargetState {
format: wgpu::TextureFormat::Bgra8Unorm,
blend: None,
write_mask: wgpu::ColorWrites::ALL,
},
],
}),
primitive: wgpu::PrimitiveState {
topology: wgpu::PrimitiveTopology::TriangleList,
polygon_mode: wgpu::PolygonMode::Fill,
front_face: wgpu::FrontFace::Ccw,
strip_index_format: None,
cull_mode: Some(wgpu::Face::Back),
clamp_depth: false,
conservative: false,
},
depth_stencil: depth_stencil_state.clone(),
multisample: wgpu::MultisampleState {
count: sample_count,
mask: !0,
alpha_to_coverage_enabled: false,
},
};
let render_pipeline = device.create_render_pipeline(&render_pipeline_descriptor);
render_pipeline_descriptor.primitive.topology = wgpu::PrimitiveTopology::LineList;
let wireframe_render_pipeline = device.create_render_pipeline(&render_pipeline_descriptor);
let wireframe_indices = build_wireframe_indices(&geometry.indices);
let wireframe_ibo = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: None,
contents: bytemuck::cast_slice(&wireframe_indices),
usage: wgpu::BufferUsages::INDEX,
});
let bg_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: None,
layout: Some(&pipeline_layout),
vertex: wgpu::VertexState {
module: &bg_vs_module,
entry_point: "main",
buffers: &[wgpu::VertexBufferLayout {
array_stride: std::mem::size_of::<Point>() as u64,
step_mode: wgpu::VertexStepMode::Vertex,
attributes: &[wgpu::VertexAttribute {
offset: 0,
format: wgpu::VertexFormat::Float32x2,
shader_location: 0,
}],
}],
},
fragment: Some(wgpu::FragmentState {
module: &bg_fs_module,
entry_point: "main",
targets: &[
wgpu::ColorTargetState {
format: wgpu::TextureFormat::Bgra8Unorm,
blend: None,
write_mask: wgpu::ColorWrites::ALL,
},
],
}),
primitive: wgpu::PrimitiveState {
topology: wgpu::PrimitiveTopology::TriangleList,
polygon_mode: wgpu::PolygonMode::Fill,
front_face: wgpu::FrontFace::Ccw,
strip_index_format: None,
cull_mode: None,
clamp_depth: false,
conservative: false,
},
depth_stencil: depth_stencil_state.clone(),
multisample: wgpu::MultisampleState {
count: sample_count,
mask: !0,
alpha_to_coverage_enabled: false,
},
});
let size = window.inner_size();
let mut surface_desc = wgpu::SurfaceConfiguration {
usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
format: wgpu::TextureFormat::Bgra8Unorm,
width: size.width,
height: size.height,
present_mode: wgpu::PresentMode::Mailbox,
};
let mut multisampled_render_target = None;
surface.configure(&device, &surface_desc);
let mut depth_texture_view = None;
let mut frame_count: f32 = 0.0;
event_loop.run(move |event, _, control_flow| {
if update_inputs(event, control_flow, &mut scene) {
// keep polling inputs.
return;
}
if scene.size_changed {
scene.size_changed = false;
let physical = scene.window_size;
surface_desc.width = physical.width;
surface_desc.height = physical.height;
surface.configure(&device, &surface_desc);
let depth_texture = device.create_texture(&wgpu::TextureDescriptor {
label: Some("Depth texture"),
size: wgpu::Extent3d {
width: surface_desc.width,
height: surface_desc.height,
depth_or_array_layers: 1,
},
mip_level_count: 1,
sample_count,
dimension: wgpu::TextureDimension::D2,
format: wgpu::TextureFormat::Depth32Float,
usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
});
depth_texture_view =
Some(depth_texture.create_view(&wgpu::TextureViewDescriptor::default()));
multisampled_render_target = if sample_count > 1 {
Some(create_multisampled_framebuffer(
&device,
&surface_desc,
sample_count,
))
} else {
None
};
}
let frame = match surface.get_current_texture() {
Ok(frame) => frame,
Err(e) => {
println!("Swap-chain error: {:?}", e);
return;
}
};
let frame_view = frame.texture.create_view(&wgpu::TextureViewDescriptor::default());
let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor {
label: Some("Encoder"),
});
cpu_primitives[stroke_prim_id as usize].width = scene.stroke_width;
cpu_primitives[stroke_prim_id as usize].color = [
(frame_count * 0.008 - 1.6).sin() * 0.1 + 0.1,
(frame_count * 0.005 - 1.6).sin() * 0.1 + 0.1,
(frame_count * 0.01 - 1.6).sin() * 0.1 + 0.1,
1.0,
];
for idx in 2..(num_instances + 1) {
cpu_primitives[idx as usize].translate = [
(frame_count * 0.001 * idx as f32).sin() * (100.0 + idx as f32 * 10.0),
(frame_count * 0.002 * idx as f32).sin() * (100.0 + idx as f32 * 10.0),
];
}
queue.write_buffer(
&globals_ubo,
0,
bytemuck::cast_slice(&[Globals {
resolution: [
scene.window_size.width as f32,
scene.window_size.height as f32,
],
zoom: scene.zoom,
scroll_offset: scene.scroll.to_array(),
bg_color,
vignette_color,
_pad: [0.0; 3],
}]),
);
queue.write_buffer(&prims_ubo, 0, bytemuck::cast_slice(&cpu_primitives));
{
// A resolve target is only supported if the attachment actually uses anti-aliasing
// So if sample_count == 1 then we must render directly to the swapchain's buffer
let color_attachment = if let Some(msaa_target) = &multisampled_render_target {
wgpu::RenderPassColorAttachment {
view: msaa_target,
ops: wgpu::Operations {
load: wgpu::LoadOp::Clear(wgpu::Color::WHITE),
store: true,
},
resolve_target: Some(&frame_view),
}
} else {
wgpu::RenderPassColorAttachment {
view: &frame_view,
ops: wgpu::Operations {
load: wgpu::LoadOp::Clear(wgpu::Color::WHITE),
store: true,
},
resolve_target: None,
}
};
let mut pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
label: None,
color_attachments: &[color_attachment],
depth_stencil_attachment: Some(wgpu::RenderPassDepthStencilAttachment {
view: depth_texture_view.as_ref().unwrap(),
depth_ops: Some(wgpu::Operations {
load: wgpu::LoadOp::Clear(0.0),
store: true,
}),
stencil_ops: Some(wgpu::Operations {
load: wgpu::LoadOp::Clear(0),
store: true,
}),
}),
});
let index_range;
if scene.show_wireframe {
pass.set_pipeline(&wireframe_render_pipeline);
pass.set_index_buffer(wireframe_ibo.slice(..), wgpu::IndexFormat::Uint32);
index_range = 0..(wireframe_indices.len() as u32);
} else {
pass.set_pipeline(&render_pipeline);
pass.set_index_buffer(ibo.slice(..), wgpu::IndexFormat::Uint32);
index_range = 0..(geometry.indices.len() as u32);
}
pass.set_bind_group(0, &bind_group, &[]);
pass.set_vertex_buffer(0, vbo.slice(..));
pass.draw_indexed(index_range, 0, 0..1);
if scene.draw_background {
pass.set_pipeline(&bg_pipeline);
pass.set_bind_group(0, &bind_group, &[]);
pass.set_index_buffer(bg_ibo.slice(..), wgpu::IndexFormat::Uint32);
pass.set_vertex_buffer(0, bg_vbo.slice(..));
pass.draw_indexed(0..6, 0, 0..1);
}
}
queue.submit(Some(encoder.finish()));
frame.present();
frame_count += 1.0;
});
}
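/// Converts a triangle-list index buffer into a line-list index buffer for
/// wireframe rendering, emitting each shared edge only once (edges are
/// de-duplicated via a set of order-normalized index pairs).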
fn build_wireframe_indices(indices: &[u32]) -> Vec<u32> {
let mut set = std::collections::HashSet::new();
let check = &mut |a: u32, b: u32| {
let (i1, i2) = if a < b { (a, b) } else { (b, a) };
set.insert((i1, i2))
};
let mut output = Vec::new();
for triangle in indices.chunks(3) {
let a = triangle[0];
let b = triangle[1];
let c = triangle[2];
if check(a, b) {
output.push(a);
output.push(b);
}
if check(b, c) {
output.push(b);
output.push(c);
}
if check(a, c) {
output.push(a);
output.push(c);
}
}
output
}
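// A minimal sanity check of the de-duplication above (illustrative, not part
// of the original file): two triangles sharing the edge (1, 2) produce five
// unique edges, i.e. ten line-list indices, rather than six edges.
#[cfg(test)]
mod wireframe_indices_tests {
    use super::build_wireframe_indices;

    #[test]
    fn shared_edge_is_emitted_once() {
        let triangles = [0, 1, 2, 2, 1, 3];
        let lines = build_wireframe_indices(&triangles);
        assert_eq!(lines.len(), 10);
    }
}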
/// This vertex constructor forwards the positions and normals provided by the
/// tessellators and adds a shape id.
pub struct WithId(pub usize);
impl FillVertexConstructor<GpuVertex> for WithId {
fn new_vertex(&mut self, vertex: tessellation::FillVertex) -> GpuVertex {
GpuVertex {
position: vertex.position().to_array(),
normal: [0.0, 0.0],
prim_id: self.0 as u32,
}
}
}
impl StrokeVertexConstructor<GpuVertex> for WithId {
fn new_vertex(&mut self, vertex: tessellation::StrokeVertex) -> GpuVertex {
let p = vertex.position_on_path();
GpuVertex {
position: p.to_array(),
normal: (vertex.position() - p).to_array(),
prim_id: self.0 as u32,
}
}
}
pub struct BgVertexCtor;
impl FillVertexConstructor<BgVertex> for BgVertexCtor {
fn new_vertex(&mut self, vertex: tessellation::FillVertex) -> BgVertex {
BgVertex {
point: vertex.position().to_array(),
}
}
}
impl tess2::geometry_builder::BasicVertexConstructor<GpuVertex> for WithId {
fn new_vertex(&mut self, position: Point) -> GpuVertex {
debug_assert!(!position.x.is_nan());
debug_assert!(!position.y.is_nan());
GpuVertex {
position: position.to_array(),
normal: [0.0, 0.0],
prim_id: self.0 as u32,
}
}
}
struct SceneParams {
target_zoom: f32,
zoom: f32,
target_scroll: Vector,
scroll: Vector,
show_points: bool,
show_wireframe: bool,
stroke_width: f32,
target_stroke_width: f32,
draw_background: bool,
cursor_position: (f32, f32),
window_size: PhysicalSize<u32>,
size_changed: bool,
}
fn update_inputs(
event: Event<()>,
control_flow: &mut ControlFlow,
scene: &mut SceneParams,
) -> bool {
match event {
Event::MainEventsCleared => {
return false;
}
Event::WindowEvent {
event: WindowEvent::Destroyed,
..
}
| Event::WindowEvent {
event: WindowEvent::CloseRequested,
..
} => {
*control_flow = ControlFlow::Exit;
return false;
}
Event::WindowEvent {
event: WindowEvent::CursorMoved { position, .. },
..
} => {
scene.cursor_position = (position.x as f32, position.y as f32);
}
Event::WindowEvent {
event: WindowEvent::Resized(size),
..
} => {
scene.window_size = size;
scene.size_changed = true
}
Event::WindowEvent {
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
state: ElementState::Pressed,
virtual_keycode: Some(key),
..
},
..
},
..
} => match key {
VirtualKeyCode::Escape => {
*control_flow = ControlFlow::Exit;
return false;
}
VirtualKeyCode::PageDown => {
scene.target_zoom *= 0.8;
}
VirtualKeyCode::PageUp => {
scene.target_zoom *= 1.25;
}
VirtualKeyCode::Left => {
scene.target_scroll.x -= 50.0 / scene.target_zoom;
}
VirtualKeyCode::Right => {
scene.target_scroll.x += 50.0 / scene.target_zoom;
}
VirtualKeyCode::Up => {
scene.target_scroll.y -= 50.0 / scene.target_zoom;
}
VirtualKeyCode::Down => {
scene.target_scroll.y += 50.0 / scene.target_zoom;
}
VirtualKeyCode::P => {
scene.show_points = !scene.show_points;
}
VirtualKeyCode::W => {
scene.show_wireframe = !scene.show_wireframe;
}
VirtualKeyCode::B => {
scene.draw_background = !scene.draw_background;
}
VirtualKeyCode::A => {
scene.target_stroke_width /= 0.8;
}
VirtualKeyCode::Z => {
scene.target_stroke_width *= 0.8;
}
_key => {}
},
_evt => {
//println!("{:?}", _evt);
}
}
//println!(" -- zoom: {}, scroll: {:?}", scene.target_zoom, scene.target_scroll);
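    // Ease zoom, scroll and stroke width toward their targets by moving a
    // fixed fraction of the remaining distance on every call (simple
    // exponential smoothing).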
scene.zoom += (scene.target_zoom - scene.zoom) / 3.0;
scene.scroll = scene.scroll + (scene.target_scroll - scene.scroll) / 3.0;
scene.stroke_width =
scene.stroke_width + (scene.target_stroke_width - scene.stroke_width) / 5.0;
*control_flow = ControlFlow::Poll;
true
}
info.py
import struct
from hsdecomp import ptrutil
def read_arg_pattern(settings, address):
num_args = read_num_args(settings, address)
func_type = read_function_type(settings, address)
assert num_args >= len(func_type)
return func_type + 'v' * (num_args - len(func_type))
def read_num_args(settings, address):
return ptrutil.read_half_word(settings, settings.text_offset + address - settings.rt.halfword.size*5)
def read_function_type(settings, address):
type_table = {
3: '',
4: 'n',
5: 'p',
12: 'nn',
13: 'np',
14: 'pn',
15: 'pp',
16: 'nnn',
17: 'nnp',
18: 'npn',
19: 'npp',
20: 'pnn',
21: 'pnp',
22: 'ppn',
23: 'ppp',
24: 'pppp',
25: 'ppppp',
26: 'pppppp',
27: 'ppppppp',
28: 'pppppppp'
}
type = ptrutil.read_half_word(settings, settings.text_offset + address - settings.rt.halfword.size*6)
if type >= 12 and settings.version < (7, 8, 0):
# Introduction of vector arguments
type += 3
if type in type_table:
return type_table[type]
elif type == 0:
bitmap = ptrutil.read_word(settings, settings.text_offset + address - settings.rt.word.size*5)
        size = bitmap & (settings.rt.word.size - 1)
        bits = bitmap >> settings.rt.word.lg_size
ret = ''
for i in range(size):
if bits % 2 == 0:
ret += 'p'
else:
ret += 'n'
bits //= 2
return ret
else:
# TODO: Read large bitmaps
assert False, "unknown function type"
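# Worked example for the bitmap branch above (illustrative values): with a
# word size of 8 (lg_size 3), a bitmap word of 0b101011 splits into
# size = 0b011 = 3 and bits = 0b101; reading the bits LSB-first gives
# 'n', 'p', 'n', i.e. the argument pattern 'npn'.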
def read_closure_type(settings, address):
type_table = {
1: 'constructor',
2: 'constructor (1 ptr, 0 nonptr)',
3: 'constructor (0 ptr, 1 nonptr)',
4: 'constructor (2 ptr, 0 nonptr)',
5: 'constructor (1 ptr, 1 nonptr)',
6: 'constructor (0 ptr, 2 nonptr)',
7: 'constructor (static)',
8: 'constructor (no CAF, static)',
9: 'function',
10: 'function (1 ptr, 0 nonptr)',
11: 'function (0 ptr, 1 nonptr)',
12: 'function (2 ptr, 0 nonptr)',
13: 'function (1 ptr, 1 nonptr)',
14: 'function (0 ptr, 2 nonptr)',
15: 'function (static)',
16: 'thunk',
17: 'thunk (1 ptr, 0 nonptr)',
18: 'thunk (0 ptr, 1 nonptr)',
19: 'thunk (2 ptr, 0 nonptr)',
20: 'thunk (1 ptr, 1 nonptr)',
21: 'thunk (0 ptr, 2 nonptr)',
22: 'thunk (static)',
23: 'selector',
28: 'indirection',
29: 'indirection (permanent)',
30: 'indirection (static)'
}
type = ptrutil.read_half_word(settings, settings.text_offset + address - settings.rt.halfword.size*2)
if type in type_table:
return type_table[type]
else:
return 'unknown: ' + str(type)
prime-sum-sieve-overestimate.rs
extern crate primal;
fn main() {
    let ns = (1..=100).map(|x| x * 100_000).collect::<Vec<_>>();
// find the primes up to this upper bound
let sieve = primal::Sieve::new(10_000_000_000);
    // now we can efficiently sum them up
    let sum = ns.iter()
        .map(|n| sieve.nth_prime(*n))
.fold(0, |a, b| a + b);
println!("the sum is {}", sum);
}
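// Sketch, not part of the original: primal also provides
// `estimate_nth_prime`, which returns (lower, upper) bounds on the n-th
// prime, so the sieve can be sized from the upper bound instead of a
// hard-coded 10^10 overestimate:
//
//     let (_, hi) = primal::estimate_nth_prime(10_000_000);
//     let sieve = primal::Sieve::new(hi as usize);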
extra_functions.py
import numpy as np
import cv2
import matplotlib.image as mpimg
def perspect_transform(img, src, dst):
    # Get transform matrix using cv2.getPerspectiveTransform()
M = cv2.getPerspectiveTransform(src, dst)
# Warp image using cv2.warpPerspective()
# keep same size as input image
warped = cv2.warpPerspective(img, M, (img.shape[1], img.shape[0]))
# Return the result
return warped
def color_thresh(img, rgb_thresh=(160, 160, 160)):
# Create an array of zeros same xy size as img, but single channel
color_select = np.zeros_like(img[:, :, 0])
    # Require that each pixel be above all three threshold values in RGB
# above_thresh will now contain a boolean array with "True"
# where threshold was met
above_thresh = (img[:, :, 0] > rgb_thresh[0]) \
& (img[:, :, 1] > rgb_thresh[1]) \
& (img[:, :, 2] > rgb_thresh[2])
    # Index the array of zeros with the boolean array and set to 1
    color_select[above_thresh] = 1
# Return the binary image
return color_select
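# Illustrative usage (not in the original; relies on the calibration points
# defined below):
#   warped = perspect_transform(image, source, destination)
#   terrain = color_thresh(warped)  # binary mask: 1 where all channels exceed the threshold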
image_name = '../data/IMG/robocam_2017_10_03_15_35_32_475.jpg'
image = mpimg.imread(image_name)
# Define calibration box in source (actual) and destination (desired) coordinates
# These source and destination points are defined to warp the image
# to a grid where each 10x10 pixel square represents 1 square meter
dst_size = 5
# Set a bottom offset to account for the fact that the bottom of the image
# is not the position of the rover but a bit in front of it
bottom_offset = 6
source = np.float32([[35, 135], [120, 97], [202, 97], [300, 135]])
destination = np.float32([[image.shape[1] / 2 - dst_size, image.shape[0] - bottom_offset],
[image.shape[1] / 2 - dst_size, image.shape[0] -
bottom_offset - 2 * dst_size],
[image.shape[1] / 2 + dst_size, image.shape[0] -
bottom_offset - 2 * dst_size],
                          [image.shape[1] / 2 + dst_size, image.shape[0] - bottom_offset]])
lisp.py
import math
import operator as op
from collections import ChainMap as Environment
Symbol = str
List = list
Number = (int, float)
def parse(program: str):
return read_from_tokens(tokenize(program))
def tokenize(raw):
return raw.replace('(', ' ( ').replace(')', ' ) ').split()
def read_from_tokens(tokens: list):
if not len(tokens):
raise SyntaxError('unexpected EOF')
head = tokens.pop(0)
if head == '(':
L = []
while tokens[0] != ')':
L.append(read_from_tokens(tokens))
tokens.pop(0)
return L
elif head == ')':
raise SyntaxError('unexpected )')
else:
return atom(head)
def atom(token: str) -> 'Atom':
try:
return int(token)
except ValueError:
try:
return float(token)
except ValueError:
return Symbol(token)
def gen_env() -> dict:
env = {}
env.update(vars(math))
env.update({
'+': op.add,
'-': op.sub,
'*': op.mul,
'/': op.truediv,
'>': op.gt,
'<': op.lt,
'>=': op.ge,
'<=': op.le,
'=': op.eq,
'abs': abs,
'append': op.add,
'apply': lambda proc, args: proc(*args),
'begin': lambda *x: x[-1],
'car': lambda x: x[0],
'cdr': lambda x: x[1:],
'cons': lambda x, y: [x] + y,
'eq?': op.is_,
'equal?': op.eq,
'length': len,
'list': lambda *x: list(x),
'list?': lambda x: isinstance(x, list),
'map': lambda *args: list(map(*args)),
'max': max,
'min': min,
'not': op.not_,
'null?': lambda x: x == [],
'number?': lambda x: isinstance(x, Number),
'procedure?': callable,
'round': round,
'symbol?': lambda x: isinstance(x, Symbol),
})
return env
global_env = gen_env()
def eval(x, env=global_env):
if isinstance(x, Symbol):
return env[x]
elif isinstance(x, Number):
return x
elif x[0] == 'quote':
_, exp = x
return exp
elif x[0] == 'if':
_, test, conseq, alt = x
exp = (conseq if eval(test, env) else alt)
return eval(exp, env)
elif x[0] == 'define':
_, symbol, exp = x
env[symbol] = eval(exp, env)
elif x[0] == 'lambda':
_, parms, body = x
return Procedure(parms, body, env)
else:
proc = eval(x[0], env)
args = [eval(arg, env) for arg in x[1:]]
return proc(*args)
def repl(prompt='lispy>> '):
while True:
val = eval(parse(input(prompt)))
if val is not None:
print(schemestr(val))
def schemestr(exp):
if isinstance(exp, List):
return '(' + ' '.join(map(schemestr, exp)) + ')'
else:
return str(exp)
class Procedure(object):
def __init__(self, parms, body, env):
self.parms, self.body, self.env = parms, body, env
def __call__(self, *args):
env = Environment(dict(zip(self.parms, args)), self.env)
return eval(self.body, env)
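# Illustrative session once the REPL below starts:
#   lispy>> (define r 10)
#   lispy>> (* pi (* r r))
#   314.1592653589793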
repl()
gtag.js
// Copyright 2012 Google Inc. All rights reserved.
(function(){
var data = {
"resource": {
"version":"1",
"macros":[],
"tags":[],
"predicates":[],
"rules":[]
},
"runtime":[
[],[]
]
};
var f,aa=this,ha=function(){if(null===ea){var a;a:{var b=aa.document,c=b.querySelector&&b.querySelector("script[nonce]");if(c){var d=c.nonce||c.getAttribute("nonce");if(d&&fa.test(d)){a=d;break a}}a=null}ea=a||""}return ea},fa=/^[\w+/_-]+[=]{0,2}$/,ea=null,ia=function(a,b){function c(){}c.prototype=b.prototype;a.tf=b.prototype;a.prototype=new c;a.prototype.constructor=a;a.$e=function(a,c,g){for(var d=Array(arguments.length-2),e=2;e<arguments.length;e++)d[e-2]=arguments[e];return b.prototype[c].apply(a,
d)}};var v=function(a,b){this.C=a;this.yd=b};v.prototype.Md=function(){return this.C};v.prototype.getData=function(){return this.yd};v.prototype.getData=v.prototype.getData;v.prototype.getType=v.prototype.Md;var ka=function(a){return"number"===typeof a&&0<=a&&isFinite(a)&&0===a%1||"string"===typeof a&&"-"!==a[0]&&a===""+parseInt(a,10)},la=function(){this.ka={};this.Ba=!1};la.prototype.get=function(a){return this.ka["dust."+a]};la.prototype.set=function(a,b){!this.Ba&&(this.ka["dust."+a]=b)};la.prototype.has=function(a){return this.ka.hasOwnProperty("dust."+a)};var ma=function(a){var b=[],c;for(c in a.ka)a.ka.hasOwnProperty(c)&&b.push(c.substr(5));return b};
la.prototype.remove=function(a){!this.Ba&&delete this.ka["dust."+a]};la.prototype.M=function(){this.Ba=!0};var w=function(a){this.na=new la;this.i=[];a=a||[];for(var b in a)a.hasOwnProperty(b)&&(ka(b)?this.i[Number(b)]=a[Number(b)]:this.na.set(b,a[b]))};f=w.prototype;f.toString=function(){for(var a=[],b=0;b<this.i.length;b++){var c=this.i[b];null===c||void 0===c?a.push(""):a.push(c.toString())}return a.join(",")};f.set=function(a,b){if("length"==a){if(!ka(b))throw"RangeError: Length property must be a valid integer.";this.i.length=Number(b)}else ka(a)?this.i[Number(a)]=b:this.na.set(a,b)};
f.get=function(a){return"length"==a?this.length():ka(a)?this.i[Number(a)]:this.na.get(a)};f.length=function(){return this.i.length};f.V=function(){for(var a=ma(this.na),b=0;b<this.i.length;b++)a.push(b+"");return new w(a)};f.remove=function(a){ka(a)?delete this.i[Number(a)]:this.na.remove(a)};f.pop=function(){return this.i.pop()};f.push=function(a){return this.i.push.apply(this.i,Array.prototype.slice.call(arguments))};f.shift=function(){return this.i.shift()};
f.splice=function(a,b,c){return new w(this.i.splice.apply(this.i,arguments))};f.unshift=function(a){return this.i.unshift.apply(this.i,Array.prototype.slice.call(arguments))};f.has=function(a){return ka(a)&&this.i.hasOwnProperty(a)||this.na.has(a)};w.prototype.unshift=w.prototype.unshift;w.prototype.splice=w.prototype.splice;w.prototype.shift=w.prototype.shift;w.prototype.push=w.prototype.push;w.prototype.pop=w.prototype.pop;w.prototype.remove=w.prototype.remove;w.prototype.getKeys=w.prototype.V;
w.prototype.get=w.prototype.get;w.prototype.set=w.prototype.set;var na=function(){function a(a,b){c[a]=b}function b(){c={};g=!1}var c={},d,e={},g=!1,h={add:a,Xb:function(a,b,c){e[a]||(e[a]={});e[a][b]=c},create:function(e){var h={add:a,assert:function(a,b){if(!g){var h=c[a]||d;h&&h.apply(e,Array.prototype.slice.call(arguments,0))}},reset:b};h.add=h.add;h.assert=h.assert;h.reset=h.reset;return h},yc:function(a){return e[a]?(b(),c=e[a],!0):!1},oa:function(a){d=a},reset:b,Ic:function(a){g=a}};h.add=h.add;h.addToCache=h.Xb;h.loadFromCache=h.yc;h.registerDefaultPermission=
h.oa;h.reset=h.reset;h.setPermitAllAsserts=h.Ic;return h};var oa=function(){function a(a,c){if(b[a]){if(b[a].Ra+c>b[a].max)throw Error("Quota exceeded");b[a].Ra+=c}}var b={},c=void 0,d=void 0,e={ne:function(a){c=a},Yb:function(){c&&a(c,1)},oe:function(a){d=a},U:function(b){d&&a(d,b)},Je:function(a,c){b[a]=b[a]||{Ra:0};b[a].max=c},Ld:function(a){return b[a]&&b[a].Ra||0},reset:function(){b={}},sd:a};e.onFnConsume=e.ne;e.consumeFn=e.Yb;e.onStorageConsume=e.oe;e.consumeStorage=e.U;e.setMax=e.Je;e.getConsumed=e.Ld;e.reset=e.reset;e.consume=e.sd;return e};var pa=function(a,b,c){this.N=a;this.I=b;this.Y=c;this.i=new la};f=pa.prototype;f.add=function(a,b){this.i.Ba||(this.N.U(("string"===typeof a?a.length:1)+("string"===typeof b?b.length:1)),this.i.set(a,b))};f.set=function(a,b){this.i.Ba||(this.Y&&this.Y.has(a)?this.Y.set(a,b):(this.N.U(("string"===typeof a?a.length:1)+("string"===typeof b?b.length:1)),this.i.set(a,b)))};f.get=function(a){return this.i.has(a)?this.i.get(a):this.Y?this.Y.get(a):void 0};
f.has=function(a){return!!this.i.has(a)||!(!this.Y||!this.Y.has(a))};f.K=function(){return this.N};f.M=function(){this.i.M()};pa.prototype.has=pa.prototype.has;pa.prototype.get=pa.prototype.get;pa.prototype.set=pa.prototype.set;pa.prototype.add=pa.prototype.add;var qa=function(){},ra=function(a){return"function"==typeof a},sa=function(a){return"string"==typeof a},ta=function(a){return"number"==typeof a&&!isNaN(a)},ua=function(a){return"[object Array]"==Object.prototype.toString.call(Object(a))},va=function(a,b){if(Array.prototype.indexOf){var c=a.indexOf(b);return"number"==typeof c?c:-1}for(var d=0;d<a.length;d++)if(a[d]===b)return d;return-1},wa=function(a,b){if(!ta(a)||!ta(b)||a>b)a=0,b=2147483647;return Math.floor(Math.random()*(b-a+1)+a)},ya=function(a){return Math.round(Number(a))||
0},Aa=function(a){return"false"==String(a).toLowerCase()?!1:!!a},Ba=function(a){var b=[];if(ua(a))for(var c=0;c<a.length;c++)b.push(String(a[c]));return b},Ca=function(a){return a?a.replace(/^\s+|\s+$/g,""):""},Da=function(){return(new Date).getTime()},Ea=function(){this.prefix="gtm.";this.values={}};Ea.prototype.set=function(a,b){this.values[this.prefix+a]=b};Ea.prototype.get=function(a){return this.values[this.prefix+a]};Ea.prototype.contains=function(a){return void 0!==this.get(a)};
var Fa=function(a,b,c){return a&&a.hasOwnProperty(b)?a[b]:c},Ga=function(a){var b=!1;return function(){if(!b)try{a()}catch(c){}b=!0}},Ha=function(a,b){for(var c in b)b.hasOwnProperty(c)&&(a[c]=b[c])},Ia=function(a){for(var b in a)if(a.hasOwnProperty(b))return!0;return!1};var x=function(a,b){la.call(this);this.zc=a;this.Id=b};ia(x,la);var Ka=function(a,b){for(var c,d=0;d<b.length&&!(c=Ja(a,b[d]),c instanceof v);d++);return c},Ja=function(a,b){var c=a.get(String(b[0]));if(!(c&&c instanceof x))throw"Attempting to execute non-function "+b[0]+".";return c.o.apply(c,[a].concat(b.slice(1)))};x.prototype.toString=function(){return this.zc};x.prototype.getName=function(){return this.zc};x.prototype.getName=x.prototype.getName;x.prototype.V=function(){return new w(ma(this))};
x.prototype.getKeys=x.prototype.V;x.prototype.o=function(a,b){var c,d={F:function(){return a},evaluate:function(b){var c=a;return ua(b)?Ja(c,b):b},xa:function(b){return Ka(a,b)},K:function(){return a.K()},ya:function(){c||(c=a.I.create(d));return c}};a.K().Yb();return this.Id.apply(d,Array.prototype.slice.call(arguments,1))};x.prototype.invoke=x.prototype.o;var La=function(){la.call(this)};ia(La,la);La.prototype.V=function(){return new w(ma(this))};La.prototype.getKeys=La.prototype.V;/*
jQuery v1.9.1 (c) 2005, 2012 jQuery Foundation, Inc. jquery.org/license. */
var Ma=/\[object (Boolean|Number|String|Function|Array|Date|RegExp)\]/,Na=function(a){if(null==a)return String(a);var b=Ma.exec(Object.prototype.toString.call(Object(a)));return b?b[1].toLowerCase():"object"},Oa=function(a,b){return Object.prototype.hasOwnProperty.call(Object(a),b)},Pa=function(a){if(!a||"object"!=Na(a)||a.nodeType||a==a.window)return!1;try{if(a.constructor&&!Oa(a,"constructor")&&!Oa(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}for(var b in a);return void 0===
b||Oa(a,b)},Qa=function(a,b){var c=b||("array"==Na(a)?[]:{}),d;for(d in a)if(Oa(a,d)){var e=a[d];"array"==Na(e)?("array"!=Na(c[d])&&(c[d]=[]),c[d]=Qa(e,c[d])):Pa(e)?(Pa(c[d])||(c[d]={}),c[d]=Qa(e,c[d])):c[d]=e}return c};var Ra=function(a){if(a instanceof w){for(var b=[],c=a.length(),d=0;d<c;d++)a.has(d)&&(b[d]=Ra(a.get(d)));return b}if(a instanceof La){for(var e={},g=a.V(),h=g.length(),k=0;k<h;k++)e[g.get(k)]=Ra(a.get(g.get(k)));return e}return a instanceof x?function(){for(var b=Array.prototype.slice.call(arguments,0),c=0;c<b.length;c++)b[c]=Sa(b[c]);var d=new pa(oa(),na());return Ra(a.o.apply(a,[d].concat(b)))}:a},Sa=function(a){if(ua(a)){for(var b=[],c=0;c<a.length;c++)a.hasOwnProperty(c)&&(b[c]=Sa(a[c]));return new w(b)}if(Pa(a)){var d=
new La,e;for(e in a)a.hasOwnProperty(e)&&d.set(e,Sa(a[e]));return d}if("function"===typeof a)return new x("",function(b){for(var c=Array.prototype.slice.call(arguments,0),d=0;d<c.length;d++)c[d]=Ra(this.evaluate(c[d]));return Sa(a.apply(a,c))});var g=typeof a;if(null===a||"string"===g||"number"===g||"boolean"===g)return a};var Ta={control:function(a,b){return new v(a,this.evaluate(b))},fn:function(a,b,c){var d=this.F(),e=this.evaluate(b);if(!(e instanceof w))throw"Error: non-List value given for Fn argument names.";var g=Array.prototype.slice.call(arguments,2);this.K().U(a.length+g.length);return new x(a,function(){return function(a){for(var b=new pa(d.N,d.I,d),c=Array.prototype.slice.call(arguments,0),h=0;h<c.length;h++)if(c[h]=this.evaluate(c[h]),c[h]instanceof v)return c[h];for(var n=e.get("length"),p=0;p<n;p++)p<
c.length?b.set(e.get(p),c[p]):b.set(e.get(p),void 0);b.set("arguments",new w(c));var q=Ka(b,g);if(q instanceof v)return"return"===q.C?q.getData():q}}())},list:function(a){var b=this.K();b.U(arguments.length);for(var c=new w,d=0;d<arguments.length;d++){var e=this.evaluate(arguments[d]);"string"===typeof e&&b.U(e.length?e.length-1:0);c.push(e)}return c},map:function(a){for(var b=this.K(),c=new La,d=0;d<arguments.length-1;d+=2){var e=this.evaluate(arguments[d])+"",g=this.evaluate(arguments[d+1]),h=e.length;
h+="string"===typeof g?g.length:1;b.U(h);c.set(e,g)}return c},undefined:function(){}};var Ua=function(){this.N=oa();this.I=na();this.za=new pa(this.N,this.I)};f=Ua.prototype;f.T=function(a,b){var c=new x(a,b);c.M();this.za.set(a,c)};f.Wb=function(a,b){Ta.hasOwnProperty(a)&&this.T(b||a,Ta[a])};f.K=function(){return this.N};f.Za=function(){this.N=oa();this.za.N=this.N};f.Ge=function(){this.I=na();this.za.I=this.I};f.L=function(a,b){var c=Array.prototype.slice.call(arguments,0);return this.zb(c)};
f.zb=function(a){for(var b,c=0;c<arguments.length;c++){var d=Ja(this.za,arguments[c]);b=d instanceof v||d instanceof x||d instanceof w||d instanceof La||null===d||void 0===d||"string"===typeof d||"number"===typeof d||"boolean"===typeof d?d:void 0}return b};f.M=function(){this.za.M()};Ua.prototype.makeImmutable=Ua.prototype.M;Ua.prototype.run=Ua.prototype.zb;Ua.prototype.execute=Ua.prototype.L;Ua.prototype.resetPermissions=Ua.prototype.Ge;Ua.prototype.resetQuota=Ua.prototype.Za;
Ua.prototype.getQuota=Ua.prototype.K;Ua.prototype.addNativeInstruction=Ua.prototype.Wb;Ua.prototype.addInstruction=Ua.prototype.T;var Va=function(a){for(var b=[],c=0;c<a.length();c++)a.has(c)&&(b[c]=a.get(c));return b};var Wa={Ne:"concat every filter forEach hasOwnProperty indexOf join lastIndexOf map pop push reduce reduceRight reverse shift slice some sort splice unshift toString".split(" "),concat:function(a,b){for(var c=[],d=0;d<this.length();d++)c.push(this.get(d));for(d=1;d<arguments.length;d++)if(arguments[d]instanceof w)for(var e=arguments[d],g=0;g<e.length();g++)c.push(e.get(g));else c.push(arguments[d]);return new w(c)},every:function(a,b){for(var c=this.length(),d=0;d<this.length()&&d<c;d++)if(this.has(d)&&
!b.o(a,this.get(d),d,this))return!1;return!0},filter:function(a,b){for(var c=this.length(),d=[],e=0;e<this.length()&&e<c;e++)this.has(e)&&b.o(a,this.get(e),e,this)&&d.push(this.get(e));return new w(d)},forEach:function(a,b){for(var c=this.length(),d=0;d<this.length()&&d<c;d++)this.has(d)&&b.o(a,this.get(d),d,this)},hasOwnProperty:function(a,b){return this.has(b)},indexOf:function(a,b,c){var d=this.length(),e=void 0===c?0:Number(c);0>e&&(e=Math.max(d+e,0));for(var g=e;g<d;g++)if(this.has(g)&&this.get(g)===
b)return g;return-1},join:function(a,b){for(var c=[],d=0;d<this.length();d++)c.push(this.get(d));return c.join(b)},lastIndexOf:function(a,b,c){var d=this.length(),e=d-1;void 0!==c&&(e=0>c?d+c:Math.min(c,e));for(var g=e;0<=g;g--)if(this.has(g)&&this.get(g)===b)return g;return-1},map:function(a,b){for(var c=this.length(),d=[],e=0;e<this.length()&&e<c;e++)this.has(e)&&(d[e]=b.o(a,this.get(e),e,this));return new w(d)},pop:function(){return this.pop()},push:function(a,b){return this.push.apply(this,Array.prototype.slice.call(arguments,
1))},reduce:function(a,b,c){var d=this.length(),e,g;if(void 0!==c)e=c,g=0;else{if(0==d)throw"TypeError: Reduce on List with no elements.";for(var h=0;h<d;h++)if(this.has(h)){e=this.get(h);g=h+1;break}if(h==d)throw"TypeError: Reduce on List with no elements.";}for(h=g;h<d;h++)this.has(h)&&(e=b.o(a,e,this.get(h),h,this));return e},reduceRight:function(a,b,c){var d=this.length(),e,g;if(void 0!==c)e=c,g=d-1;else{if(0==d)throw"TypeError: ReduceRight on List with no elements.";for(var h=1;h<=d;h++)if(this.has(d-
h)){e=this.get(d-h);g=d-(h+1);break}if(h>d)throw"TypeError: ReduceRight on List with no elements.";}for(h=g;0<=h;h--)this.has(h)&&(e=b.o(a,e,this.get(h),h,this));return e},reverse:function(){for(var a=Va(this),b=a.length-1,c=0;0<=b;b--,c++)a.hasOwnProperty(b)?this.set(c,a[b]):this.remove(c);return this},shift:function(){return this.shift()},slice:function(a,b,c){var d=this.length();void 0===b&&(b=0);b=0>b?Math.max(d+b,0):Math.min(b,d);c=void 0===c?d:0>c?Math.max(d+c,0):Math.min(c,d);c=Math.max(b,
c);for(var e=[],g=b;g<c;g++)e.push(this.get(g));return new w(e)},some:function(a,b){for(var c=this.length(),d=0;d<this.length()&&d<c;d++)if(this.has(d)&&b.o(a,this.get(d),d,this))return!0;return!1},sort:function(a,b){var c=Va(this);void 0===b?c.sort():c.sort(function(c,d){return Number(b.o(a,c,d))});for(var d=0;d<c.length;d++)c.hasOwnProperty(d)?this.set(d,c[d]):this.remove(d)},splice:function(a,b,c,d){return this.splice.apply(this,Array.prototype.splice.call(arguments,1,arguments.length-1))},toString:function(){return this.toString()},
unshift:function(a,b){return this.unshift.apply(this,Array.prototype.slice.call(arguments,1))}};var y={qc:{ADD:0,AND:1,APPLY:2,ASSIGN:3,BREAK:4,CASE:5,CONTINUE:6,CONTROL:49,CREATE_ARRAY:7,CREATE_OBJECT:8,DEFAULT:9,DEFN:50,DIVIDE:10,DO:11,EQUALS:12,EXPRESSION_LIST:13,FN:51,FOR:14,FOR_IN:47,GET:15,GET_CONTAINER_VARIABLE:48,GET_INDEX:16,GET_PROPERTY:17,GREATER_THAN:18,GREATER_THAN_EQUALS:19,IDENTITY_EQUALS:20,IDENTITY_NOT_EQUALS:21,IF:22,LESS_THAN:23,LESS_THAN_EQUALS:24,MODULUS:25,MULTIPLY:26,NEGATE:27,NOT:28,NOT_EQUALS:29,NULL:45,OR:30,PLUS_EQUALS:31,POST_DECREMENT:32,POST_INCREMENT:33,PRE_DECREMENT:34,
PRE_INCREMENT:35,QUOTE:46,RETURN:36,SET_PROPERTY:43,SUBTRACT:37,SWITCH:38,TERNARY:39,TYPEOF:40,UNDEFINED:44,VAR:41,WHILE:42}},Xa="charAt concat indexOf lastIndexOf match replace search slice split substring toLowerCase toLocaleLowerCase toString toUpperCase toLocaleUpperCase trim".split(" "),Ya=new v("break"),Za=new v("continue");y.add=function(a,b){return this.evaluate(a)+this.evaluate(b)};y.and=function(a,b){return this.evaluate(a)&&this.evaluate(b)};
y.apply=function(a,b,c){a=this.evaluate(a);b=this.evaluate(b);c=this.evaluate(c);if(!(c instanceof w))throw"Error: Non-List argument given to Apply instruction.";if(null===a||void 0===a)throw"TypeError: Can't read property "+b+" of "+a+".";if("boolean"==typeof a||"number"==typeof a){if("toString"==b)return a.toString();throw"TypeError: "+a+"."+b+" is not a function.";}if("string"==typeof a){if(0<=va(Xa,b))return Sa(a[b].apply(a,Va(c)));throw"TypeError: "+b+" is not a function";}if(a instanceof w){if(a.has(b)){var d=
a.get(b);if(d instanceof x){var e=Va(c);e.unshift(this.F());return d.o.apply(d,e)}throw"TypeError: "+b+" is not a function";}if(0<=va(Wa.Ne,b))return e=Va(c),e.unshift(this.F()),Wa[b].apply(a,e)}if(a instanceof x||a instanceof La){if(a.has(b)){d=a.get(b);if(d instanceof x)return e=Va(c),e.unshift(this.F()),d.o.apply(d,e);throw"TypeError: "+b+" is not a function";}if("toString"==b)return a instanceof x?a.getName():a.toString();if("hasOwnProperty"==b)return a.has.apply(a,Va(c))}throw"TypeError: Object has no '"+
b+"' property.";};y.assign=function(a,b){a=this.evaluate(a);if("string"!=typeof a)throw"Invalid key name given for assignment.";var c=this.F();if(!c.has(a))throw"Attempting to assign to undefined value "+b;var d=this.evaluate(b);c.set(a,d);return d};y["break"]=function(){return Ya};y["case"]=function(a){for(var b=this.evaluate(a),c=0;c<b.length;c++){var d=this.evaluate(b[c]);if(d instanceof v)return d}};y["continue"]=function(){return Za};
y.zd=function(a,b,c){var d=new w;b=this.evaluate(b);for(var e=0;e<b.length;e++)d.push(b[e]);var g=[y.qc.FN,a,d].concat(Array.prototype.splice.call(arguments,2,arguments.length-2));this.F().set(a,this.evaluate(g))};y.Cd=function(a,b){return this.evaluate(a)/this.evaluate(b)};y.Fd=function(a,b){return this.evaluate(a)==this.evaluate(b)};y.Gd=function(a){for(var b,c=0;c<arguments.length;c++)b=this.evaluate(arguments[c]);return b};
y.Jd=function(a,b,c){a=this.evaluate(a);b=this.evaluate(b);c=this.evaluate(c);var d=this.F();if("string"==typeof b)for(var e=0;e<b.length;e++){d.set(a,e);var g=this.xa(c);if(g instanceof v){if("break"==g.C)break;if("return"==g.C)return g}}else if(b instanceof La||b instanceof w||b instanceof x){var h=b.V(),k=h.length();for(e=0;e<k;e++)if(d.set(a,h.get(e)),g=this.xa(c),g instanceof v){if("break"==g.C)break;if("return"==g.C)return g}}};y.get=function(a){return this.F().get(this.evaluate(a))};
y.ic=function(a,b){var c;a=this.evaluate(a);b=this.evaluate(b);if(void 0===a||null===a)throw"TypeError: cannot access property of "+a+".";a instanceof La||a instanceof w||a instanceof x?c=a.get(b):"string"==typeof a&&("length"==b?c=a.length:ka(b)&&(c=a[b]));return c};y.Nd=function(a,b){return this.evaluate(a)>this.evaluate(b)};y.Od=function(a,b){return this.evaluate(a)>=this.evaluate(b)};y.Vd=function(a,b){return this.evaluate(a)===this.evaluate(b)};y.Wd=function(a,b){return this.evaluate(a)!==this.evaluate(b)};
y["if"]=function(a,b,c){var d=[];this.evaluate(a)?d=this.evaluate(b):c&&(d=this.evaluate(c));var e=this.xa(d);if(e instanceof v)return e};y.de=function(a,b){return this.evaluate(a)<this.evaluate(b)};y.ee=function(a,b){return this.evaluate(a)<=this.evaluate(b)};y.he=function(a,b){return this.evaluate(a)%this.evaluate(b)};y.multiply=function(a,b){return this.evaluate(a)*this.evaluate(b)};y.ie=function(a){return-this.evaluate(a)};y.je=function(a){return!this.evaluate(a)};
y.ke=function(a,b){return this.evaluate(a)!=this.evaluate(b)};y["null"]=function(){return null};y.or=function(a,b){return this.evaluate(a)||this.evaluate(b)};y.Ec=function(a,b){var c=this.evaluate(a);this.evaluate(b);return c};y.Fc=function(a){return this.evaluate(a)};y.quote=function(a){return Array.prototype.slice.apply(arguments)};y["return"]=function(a){return new v("return",this.evaluate(a))};
y.setProperty=function(a,b,c){a=this.evaluate(a);b=this.evaluate(b);c=this.evaluate(c);if(null===a||void 0===a)throw"TypeError: Can't set property "+b+" of "+a+".";(a instanceof x||a instanceof w||a instanceof La)&&a.set(b,c);return c};y.Me=function(a,b){return this.evaluate(a)-this.evaluate(b)};
y["switch"]=function(a,b,c){a=this.evaluate(a);b=this.evaluate(b);c=this.evaluate(c);if(!ua(b)||!ua(c))throw"Error: Malformed switch instruction.";for(var d,e=!1,g=0;g<b.length;g++)if(e||a===this.evaluate(b[g]))if(d=this.evaluate(c[g]),d instanceof v){var h=d.C;if("break"==h)return;if("return"==h||"continue"==h)return d}else e=!0;if(c.length==b.length+1&&(d=this.evaluate(c[c.length-1]),d instanceof v&&("return"==d.C||"continue"==d.C)))return d};
y.Oe=function(a,b,c){return this.evaluate(a)?this.evaluate(b):this.evaluate(c)};y["typeof"]=function(a){a=this.evaluate(a);return a instanceof x?"function":typeof a};y.undefined=function(){};y["var"]=function(a){for(var b=this.F(),c=0;c<arguments.length;c++){var d=arguments[c];"string"!=typeof d||b.add(d,void 0)}};
y["while"]=function(a,b,c,d){var e,g=this.evaluate(d);if(this.evaluate(c)&&(e=this.xa(g),e instanceof v)){if("break"==e.C)return;if("return"==e.C)return e}for(;this.evaluate(a);){e=this.xa(g);if(e instanceof v){if("break"==e.C)break;if("return"==e.C)return e}this.evaluate(b)}};var cb=function(){this.oc=!1;this.H=new Ua;$a(this);this.oc=!0};cb.prototype.ae=function(){return this.oc};cb.prototype.isInitialized=cb.prototype.ae;cb.prototype.L=function(a){this.H.I.yc(String(a[0]))||(this.H.I.reset(),this.H.I.Ic(!0));return this.H.zb(a)};cb.prototype.execute=cb.prototype.L;cb.prototype.M=function(){this.H.M()};cb.prototype.makeImmutable=cb.prototype.M;
var $a=function(a){function b(a,b){e.H.Wb(a,String(b))}function c(a,b){e.H.T(String(d[a]),b)}var d=y.qc,e=a;b("control",d.CONTROL);b("fn",d.FN);b("list",d.CREATE_ARRAY);b("map",d.CREATE_OBJECT);b("undefined",d.UNDEFINED);c("ADD",y.add);c("AND",y.and);c("APPLY",y.apply);c("ASSIGN",y.assign);c("BREAK",y["break"]);c("CASE",y["case"]);c("CONTINUE",y["continue"]);c("DEFAULT",y["case"]);c("DEFN",y.zd);c("DIVIDE",y.Cd);c("EQUALS",y.Fd);c("EXPRESSION_LIST",y.Gd);c("FOR_IN",y.Jd);c("GET",y.get);c("GET_INDEX",
y.ic);c("GET_PROPERTY",y.ic);c("GREATER_THAN",y.Nd);c("GREATER_THAN_EQUALS",y.Od);c("IDENTITY_EQUALS",y.Vd);c("IDENTITY_NOT_EQUALS",y.Wd);c("IF",y["if"]);c("LESS_THAN",y.de);c("LESS_THAN_EQUALS",y.ee);c("MODULUS",y.he);c("MULTIPLY",y.multiply);c("NEGATE",y.ie);c("NOT",y.je);c("NOT_EQUALS",y.ke);c("NULL",y["null"]);c("OR",y.or);c("POST_DECREMENT",y.Ec);c("POST_INCREMENT",y.Ec);c("PRE_DECREMENT",y.Fc);c("PRE_INCREMENT",y.Fc);c("QUOTE",y.quote);c("RETURN",y["return"]);c("SET_PROPERTY",y.setProperty);
c("SUBTRACT",y.Me);c("SWITCH",y["switch"]);c("TERNARY",y.Oe);c("TYPEOF",y["typeof"]);c("VAR",y["var"]);c("WHILE",y["while"])};cb.prototype.T=function(a,b){this.H.T(a,b)};cb.prototype.addInstruction=cb.prototype.T;cb.prototype.K=function(){return this.H.K()};cb.prototype.getQuota=cb.prototype.K;cb.prototype.Za=function(){this.H.Za()};cb.prototype.resetQuota=cb.prototype.Za;cb.prototype.oa=function(a){this.H.I.oa(a)};cb.prototype.Pa=function(a,b,c){this.H.I.Xb(a,b,c)};var db=function(){this.Va={}};db.prototype.get=function(a){return this.Va.hasOwnProperty(a)?this.Va[a]:void 0};db.prototype.add=function(a,b){if(this.Va.hasOwnProperty(a))throw"Attempting to add a function which already exists: "+a+".";if(!b)throw"Attempting to add an undefined function: "+a+".";var c=new x(a,function(){for(var a=Array.prototype.slice.call(arguments,0),c=0;c<a.length;c++)a[c]=this.evaluate(a[c]);return b.apply(this,a)});c.M();this.Va[a]=c};
db.prototype.addAll=function(a){for(var b in a)a.hasOwnProperty(b)&&this.add(b,a[b])};var z=window,B=document,eb=navigator,fb=B.currentScript&&B.currentScript.src,gb=function(a,b){var c=z[a];z[a]=void 0===c?b:c;return z[a]},hb=function(a,b){b&&(a.addEventListener?a.onload=b:a.onreadystatechange=function(){a.readyState in{loaded:1,complete:1}&&(a.onreadystatechange=null,b())})},ib=function(a,b,c){var d=B.createElement("script");d.type="text/javascript";d.async=!0;d.src=a;hb(d,b);c&&(d.onerror=c);ha()&&d.setAttribute("nonce",ha());var e=B.getElementsByTagName("script")[0]||B.body||B.head;
e.parentNode.insertBefore(d,e);return d},jb=function(a,b){var c=B.createElement("iframe");c.height="0";c.width="0";c.style.display="none";c.style.visibility="hidden";var d=B.body&&B.body.lastChild||B.body||B.head;d.parentNode.insertBefore(c,d);hb(c,b);void 0!==a&&(c.src=a);return c},G=function(a,b,c){var d=new Image(1,1);d.onload=function(){d.onload=null;b&&b()};d.onerror=function(){d.onerror=null;c&&c()};d.src=a},kb=function(a,b,c,d){a.addEventListener?a.addEventListener(b,c,!!d):a.attachEvent&&
a.attachEvent("on"+b,c)},lb=function(a,b,c,d){a.removeEventListener?a.removeEventListener(b,c,!!d):a.detachEvent&&a.detachEvent("on"+b,c)},I=function(a){z.setTimeout(a,0)},nb=function(a){var b=B.getElementById(a);if(b&&mb(b,"id")!=a)for(var c=1;c<document.all[a].length;c++)if(mb(document.all[a][c],"id")==a)return document.all[a][c];return b},mb=function(a,b){return a&&b&&a.attributes&&a.attributes[b]?a.attributes[b].value:null},ob=function(a){var b=a.innerText||a.textContent||"";b&&" "!=b&&(b=b.replace(/^[\s\xa0]+|[\s\xa0]+$/g,
""));b&&(b=b.replace(/(\xa0+|\s{2,}|\n|\r\t)/g," "));return b},qb=function(a){var b=B.createElement("div");b.innerHTML="A<div>"+a+"</div>";b=b.lastChild;for(var c=[];b.firstChild;)c.push(b.removeChild(b.firstChild));return c},rb=function(a){eb.sendBeacon&&eb.sendBeacon(a)||G(a)};var sb=/^(?:(?:https?|mailto|ftp):|[^:/?#]*(?:[/?#]|$))/i;var tb=/:[0-9]+$/,ub=function(a,b,c){for(var d=a.split("&"),e=0;e<d.length;e++){var g=d[e].split("=");if(decodeURIComponent(g[0]).replace(/\+/g," ")==b){var h=g.slice(1).join("=");return c?h:decodeURIComponent(h).replace(/\+/g," ")}}},vb=function(a,b,c,d,e){var g,h=function(a){return a?a.replace(":","").toLowerCase():""},k=h(a.protocol)||h(z.location.protocol);b&&(b=String(b).toLowerCase());switch(b){case "protocol":g=k;break;case "host":g=(a.hostname||z.location.hostname).replace(tb,"").toLowerCase();
if(c){var l=/^www\d*\./.exec(g);l&&l[0]&&(g=g.substr(l[0].length))}break;case "port":g=String(Number(a.hostname?a.port:z.location.port)||("http"==k?80:"https"==k?443:""));break;case "path":g="/"==a.pathname.substr(0,1)?a.pathname:"/"+a.pathname;var m=g.split("/");0<=va(d||[],m[m.length-1])&&(m[m.length-1]="");g=m.join("/");break;case "query":g=a.search.replace("?","");e&&(g=ub(g,e));break;case "extension":var n=a.pathname.split(".");g=1<n.length?n[n.length-1]:"";g=g.split("/")[0];break;case "fragment":g=
a.hash.replace("#","");break;default:g=a&&a.href}return g},wb=function(a){var b="";a&&a.href&&(b=a.hash?a.href.replace(a.hash,""):a.href);return b},N=function(a){var b=document.createElement("a");a&&(sb.test(a),b.href=a);var c=b.pathname;"/"!==c[0]&&(c="/"+c);var d=b.hostname.replace(tb,"");return{href:b.href,protocol:b.protocol,host:b.host,hostname:d,pathname:c,search:b.search,hash:b.hash,port:b.port}};var zb=function(){this.Ya=new cb;var a=new db;a.addAll(xb());yb(this,function(b){return a.get(b)})},xb=function(){return{callInWindow:Ab,callLater:Bb,copyFromWindow:Cb,encodeURI:encodeURI,encodeURIComponent:encodeURIComponent,getReferrer:Db,getUrl:Eb,getUrlFragment:Fb,isPlainObject:Gb,injectHiddenIframe:Hb,injectScript:Ib,logToConsole:Jb,queryPermission:Kb,removeUrlFragment:Lb,replaceAll:Mb,sendPixel:Nb,setInWindow:Ob}};zb.prototype.L=function(a){return this.Ya.L(a)};zb.prototype.execute=zb.prototype.L;
var yb=function(a,b){a.Ya.T("require",b)};zb.prototype.oa=function(a){this.Ya.oa(a)};zb.prototype.Pa=function(a,b,c){this.Ya.Pa(a,b,c)};function Ab(a,b){for(var c=a.split("."),d=z,e=d[c[0]],g=1;e&&g<c.length;g++)d=e,e=e[c[g]];if("function"==Na(e)){var h=[];for(g=1;g<arguments.length;g++)h.push(Ra(arguments[g]));e.apply(d,h)}}function Bb(a){var b=this.F();I(function(){a instanceof x&&a.o(b)})}
function Cb(a,b,c){for(var d=a.split("."),e=z,g=0;g<d.length-1;g++)if(e=e[d[g]],void 0===e||null===e)return;b&&(void 0===e[d[g]]||c&&!e[d[g]])&&(e[d[g]]=Ra(b));return Sa(e[d[g]])}function Db(){return B.referrer}function Eb(a,b,c,d){var e=z.location.href,g;if(c&&c instanceof w){g=[];for(var h=0;h<c.length();h++){var k=c.get(h);"string"==typeof k&&g.push(k)}}return vb(N(e),a,b,g,d)}function Fb(a){return vb(N(a),"fragment")}function Gb(a){return a instanceof La}
function Hb(a,b){var c=this.F();jb(a,function(){b instanceof x&&b.o(c)})}var Pb={};
function Ib(a,b,c,d){this.ya().assert("injectScript",a);var e=this.F(),g=function(){b instanceof x&&b.o(e)},h=function(){c instanceof x&&c.o(e)};d?Pb[d]?(Pb[d].onSuccess.push(g),Pb[d].onFailure.push(h)):(Pb[d]={onSuccess:[g],onFailure:[h]},g=function(){for(var a=Pb[d].onSuccess,b=0;b<a.length;b++)I(a[b]);a.push=function(a){I(a);return 0}},h=function(){for(var a=Pb[d].onFailure,b=0;b<a.length;b++)I(a[b]);Pb[d]=null},ib(a,g,h)):ib(a,g,h)}
function Jb(){try{this.ya().assert("logging")}catch(c){return}for(var a=Array.prototype.slice.call(arguments,0),b=0;b<a.length;b++)a[b]=Ra(a[b]);console.log.apply(console,a)}function Lb(a){return wb(N(a))}function Mb(a,b,c){return a.replace(new RegExp(b,"g"),c)}function Nb(a,b,c){this.ya().assert("sendPixel",a);var d=this.F();G(a,function(){b instanceof x&&b.o(d)},function(){c instanceof x&&c.o(d)})}
function Ob(a,b,c){this.ya().assert("writeGlobals",a);for(var d=a.split("."),e=z,g=0;g<d.length-1;g++)if(e=e[d[g]],void 0===e)return!1;return void 0===e[d[g]]||c?(e[d[g]]=Ra(b),!0):!1}function Kb(a,b){try{return this.ya().assert.apply(null,Array.prototype.slice.call(arguments,0)),!0}catch(c){return!1}};var mc,nc=[],oc=[],pc=[],qc=[],rc=[],sc={},tc,uc,vc,wc=function(a,b){var c={};c["function"]="__"+a;for(var d in b)b.hasOwnProperty(d)&&(c["vtp_"+d]=b[d]);return c},xc=function(a){var b=a["function"];if(!b)throw"Error: No function name given for function call.";var c=!!sc[b],d={},e;for(e in a)a.hasOwnProperty(e)&&0===e.indexOf("vtp_")&&(d[c?e:e.substr(4)]=a[e]);return c?sc[b](d):mc(b,d)},zc=function(a,b,c,d){c=c||[];d=d||qa;var e={},g;for(g in a)a.hasOwnProperty(g)&&(e[g]=yc(a[g],b,c,d));return e},
Ac=function(a){var b=a["function"];if(!b)throw"Error: No function name given for function call.";var c=sc[b];return c?c.b||0:0},yc=function(a,b,c,d){if(ua(a)){var e;switch(a[0]){case "function_id":return a[1];case "list":e=[];for(var g=1;g<a.length;g++)e.push(yc(a[g],b,c,d));return e;case "macro":var h=a[1];if(c[h])return;var k=nc[h];if(!k||b(k))return;c[h]=!0;try{var l=zc(k,b,c,d);e=xc(l);vc&&(e=vc.ud(e,l))}catch(C){d(h,C),e=!1}c[h]=!1;return e;case "map":e={};for(var m=1;m<a.length;m+=2)e[yc(a[m],
b,c,d)]=yc(a[m+1],b,c,d);return e;case "template":e=[];for(var n=!1,p=1;p<a.length;p++){var q=yc(a[p],b,c,d);uc&&(n=n||q===uc.Ja);e.push(q)}return uc&&n?uc.vd(e):e.join("");case "escape":e=yc(a[1],b,c,d);if(uc&&ua(a[1])&&"macro"===a[1][0]&&uc.be(a))return uc.ue(e);e=String(e);for(var r=2;r<a.length;r++)Qb[a[r]]&&(e=Qb[a[r]](e));return e;case "tag":var u=a[1];if(!qc[u])throw Error("Unable to resolve tag reference "+u+".");return e={cc:a[2],index:u};case "zb":var t={arg0:a[2],arg1:a[3],ignore_case:a[5]};
t["function"]=a[1];var A=Dc(t,b,c,d);a[4]&&(A=!A);return A;default:throw Error("Attempting to expand unknown Value type: "+a[0]+".");}}return a},Dc=function(a,b,c,d){try{return tc(zc(a,b,c,d))}catch(e){JSON.stringify(a)}return null};var Ec=null,Ic=function(a){function b(a){for(var b=0;b<a.length;b++)d[a[b]]=!0}var c=[],d=[];Ec=Fc(a,Gc()||function(){});for(var e=0;e<oc.length;e++){var g=oc[e],h=Hc(g);if(h){for(var k=g.add||[],l=0;l<k.length;l++)c[k[l]]=!0;b(g.block||[])}else null===h&&b(g.block||[])}var m=[];for(e=0;e<qc.length;e++)c[e]&&!d[e]&&(m[e]=!0);return m},Hc=function(a){for(var b=a["if"]||[],c=0;c<b.length;c++){var d=Ec(b[c]);if(!d)return null===d?null:!1}var e=a.unless||[];for(c=0;c<e.length;c++){d=Ec(e[c]);if(null===
d)return null;if(d)return!1}return!0};var Fc=function(a,b){var c=[];return function(d){void 0===c[d]&&(c[d]=Dc(pc[d],a,void 0,b));return c[d]}};/*
Copyright (c) 2014 Derek Brans, MIT license https://github.com/krux/postscribe/blob/master/LICENSE. Portions derived from simplehtmlparser, which is licensed under the Apache License, Version 2.0 */
var Lc={},Mc=null;Lc.w="UA-110820930-3";Lc.Na="af";var Nc=null,Oc=null,Pc="//www.googletagmanager.com/a?id="+Lc.w+"&cv=1",Qc={},Rc={},Sc=function(){var a=Mc.sequence||0;Mc.sequence=a+1;return a};var P=function(){var a=function(a){return{toString:function(){return a}}};return{Lb:a("convert_case_to"),Mb:a("convert_false_to"),Nb:a("convert_null_to"),Ob:a("convert_true_to"),Pb:a("convert_undefined_to"),ca:a("function"),Mc:a("instance_name"),Nc:a("live_only"),Oc:a("malware_disabled"),Pc:a("once_per_event"),Rb:a("once_per_load"),Sb:a("setup_tags"),Qc:a("tag_id"),Tb:a("teardown_tags")}}();var Tc=new Ea,Uc={},Xc={set:function(a,b){Qa(Vc(a,b),Uc)},get:function(a){return Wc(a,2)},reset:function(){Tc=new Ea;Uc={}}},Wc=function(a,b){return 2!=b?Tc.get(a):Yc(a)},Yc=function(a,b,c){var d=a.split(".");var e=function(a,b){for(var c=0;void 0!==a&&c<d.length;c++){if(null===a)return!1;a=a[d[c]]}return void 0!==a||1<c?a:b.length?e(Zc(b.pop()),b):$c(d)};return e(Uc.eventModel,[b,c]);return $c(d)},$c=function(a){for(var b=Uc,c=0;c<a.length;c++){if(null===
b)return!1;if(void 0===b)break;b=b[a[c]]}return b};var Zc=function(a){if(a){var b=$c(["gtag","targets",a]);return Pa(b)?b:void 0}},ad=function(a,b){function c(a){if(a)for(var b in a)a.hasOwnProperty(b)&&(d[b]=null)}var d={};c(Uc);delete d.eventModel;c(Zc(a));c(Zc(b));c(Uc.eventModel);var e=[],g;for(g in d)d.hasOwnProperty(g)&&e.push(g);return e};
var bd=function(a,b){Tc.set(a,b);Qa(Vc(a,b),Uc)},Vc=function(a,b){for(var c={},d=c,e=a.split("."),g=0;g<e.length-1;g++)d=d[e[g]]={};d[e[e.length-1]]=b;return c};var cd=new RegExp(/^(.*\.)?(google|youtube|blogger|withgoogle)(\.com?)?(\.[a-z]{2})?\.?$/),dd={customPixels:["nonGooglePixels"],html:["customScripts","customPixels","nonGooglePixels","nonGoogleScripts","nonGoogleIframes"],customScripts:["html","customPixels","nonGooglePixels","nonGoogleScripts","nonGoogleIframes"],nonGooglePixels:[],nonGoogleScripts:["nonGooglePixels"],nonGoogleIframes:["nonGooglePixels"]},ed={customPixels:["customScripts","html"],html:["customScripts"],customScripts:["html"],nonGooglePixels:["customPixels",
"customScripts","html","nonGoogleScripts","nonGoogleIframes"],nonGoogleScripts:["customScripts","html"],nonGoogleIframes:["customScripts","html","nonGoogleScripts"]},fd=function(a,b){for(var c=[],d=0;d<a.length;d++)c.push(a[d]),c.push.apply(c,b[a[d]]||[]);return c};
var gd=function(a){var b=Wc("gtm.whitelist");b=["google","gtagfl","oid","op"];var c=b&&fd(Ba(b),dd),d=Wc("gtm.blacklist")||Wc("tagTypeBlacklist")||[];
cd.test(z.location&&z.location.hostname)&&(d=Ba(d),d.push("nonGooglePixels","nonGoogleScripts"));var e=d&&fd(Ba(d),ed),g={};return function(h){var k=h&&h[P.ca];if(!k||"string"!=typeof k)return!0;k=k.replace(/^_*/,"");if(void 0!==g[k])return g[k];var l=Rc[k]||[],m=a(k);if(b){var n;if(n=m)a:{if(0>va(c,k))if(l&&0<l.length)for(var p=0;p<l.length;p++){if(0>va(c,l[p])){n=!1;break a}}else{n=!1;break a}n=!0}m=n}var q=!1;if(d){var r;if(!(r=
0<=va(e,k)))a:{for(var u=l||[],t=new Ea,A=0;A<e.length;A++)t.set(e[A],!0);for(var C=0;C<u.length;C++)if(t.get(u[C])){r=!0;break a}r=!1}q=r}return g[k]=!m||q}};var hd={ud:function(a,b){b[P.Lb]&&"string"===typeof a&&(a=1==b[P.Lb]?a.toLowerCase():a.toUpperCase());b.hasOwnProperty(P.Nb)&&null===a&&(a=b[P.Nb]);b.hasOwnProperty(P.Pb)&&void 0===a&&(a=b[P.Pb]);b.hasOwnProperty(P.Ob)&&!0===a&&(a=b[P.Ob]);b.hasOwnProperty(P.Mb)&&!1===a&&(a=b[P.Mb]);return a}};var id=function(a,b){this.qe=b};ia(id,Error);id.prototype.getParameters=function(){return this.qe};var jd=function(a){var b=Mc.zones;!b&&a&&(b=Mc.zones=a());return b},kd={active:!0,isWhitelisted:function(){return!0}};var ld=!1,md=0,nd=[];function od(a){if(!ld){var b=B.createEventObject,c="complete"==B.readyState,d="interactive"==B.readyState;if(!a||"readystatechange"!=a.type||c||!b&&d){ld=!0;for(var e=0;e<nd.length;e++)I(nd[e])}nd.push=function(){for(var a=0;a<arguments.length;a++)I(arguments[a]);return 0}}}function pd(){if(!ld&&140>md){md++;try{B.documentElement.doScroll("left"),od()}catch(a){z.setTimeout(pd,50)}}}var qd=function(a){ld?a():nd.push(a)};var rd=function(){function a(a){return!ta(a)||0>a?0:a}if(!Mc._li&&z.performance&&z.performance.timing){var b=z.performance.timing.navigationStart,c=ta(Xc.get("gtm.start"))?Xc.get("gtm.start"):0;Mc._li={cst:a(c-b),cbt:a(Nc-b)}}};var sd=!1,td=function(){return z.GoogleAnalyticsObject&&z[z.GoogleAnalyticsObject]};var ud=function(a){z.GoogleAnalyticsObject||(z.GoogleAnalyticsObject=a||"ga");var b=z.GoogleAnalyticsObject;if(!z[b]){var c=function(){c.q=c.q||[];c.q.push(arguments)};c.l=Number(new Date);z[b]=c}rd();return z[b]},wd=function(a,b,c,d){b=String(b).replace(/\s+/g,"").split(",");var e=td();e(a+"require","linker");e(a+"linker:autoLink",b,c,d)};
var Ad=function(){return"&tc="+qc.filter(function(a){return a}).length},Bd="0.005000">Math.random(),Cd=function(){var a=0,b=0;return{ce:function(){if(2>a)return!1;1E3<=Da()-b&&(a=0);return 2<=a},Be:function(){1E3<=Da()-b&&(a=0);a++;b=Da()}}},Dd="",Ed=function(){Dd=[Pc,"&v=3&t=t","&pid="+wa(),"&rv="+Lc.Na].join("")},Fd={},Gd="",Hd=void 0,Id={},Jd={},Kd=void 0,Ld=null,Md=1E3,Nd=function(){var a=Hd;return void 0===a?"":[Dd,Fd[a]?"":"&es=1",Id[a],Ad(),Gd,"&z=0"].join("")},Od=function(){Kd&&
(z.clearTimeout(Kd),Kd=void 0);void 0===Hd||Fd[Hd]&&!Gd||(Jd[Hd]||Ld.ce()||0>=Md--?Jd[Hd]=!0:(Ld.Be(),G(Nd()),Fd[Hd]=!0,Gd=""))},Pd=function(a,b,c){if(Bd&&!Jd[a]&&b){a!==Hd&&(Od(),Hd=a);var d=c+String(b[P.ca]||"").replace(/_/g,"");Gd=Gd?Gd+"."+d:"&tr="+d;Kd||(Kd=z.setTimeout(Od,500));2022<=Nd().length&&Od()}};function Qd(a,b,c,d,e,g){var h=qc[a],k=Rd(a,b,c,d,e,g);if(!k)return null;var l=yc(h[P.Sb],g.X,[],Sd());if(l&&l.length){var m=l[0];k=Qd(m.index,b,k,1===m.cc?e:k,e,g)}return k}
function Rd(a,b,c,d,e,g){function h(){var b=zc(k,g.X,[],l);b.vtp_gtmOnSuccess=function(){Pd(g.id,qc[a],"5");c()};b.vtp_gtmOnFailure=function(){Pd(g.id,qc[a],"6");d()};b.vtp_gtmTagId=k.tag_id;if(k[P.Oc])d();else{Pd(g.id,k,"1");try{xc(b)}catch(C){Pd(g.id,
k,"7");e()}}}var k=qc[a];if(g.X(k))return null;var l=Sd(),m=yc(k[P.Tb],g.X,[],l);if(m&&m.length){var n=m[0],p=Qd(n.index,b,c,d,e,g);if(!p)return null;c=p;d=2===n.cc?e:p}if(k[P.Rb]||k[P.Pc]){var q=k[P.Rb]?rc:b,r=c,u=d;if(!q[a]){h=Ga(h);var t=Td(a,q,h);c=t.S;d=t.la}return function(){q[a](r,u)}}return h}
function Td(a,b,c){var d=[],e=[];b[a]=Ud(d,e,c);return{S:function(){b[a]=Vd;for(var c=0;c<d.length;c++)d[c]()},la:function(){b[a]=Wd;for(var c=0;c<e.length;c++)e[c]()}}}function Ud(a,b,c){return function(d,e){a.push(d);b.push(e);c()}}function Vd(a){a()}function Wd(a,b){b()}function Sd(){return function(){}};function Xd(a){var b=0,c=0,d=!1;return{add:function(){c++;return Ga(function(){b++;d&&b>=c&&a()})},bd:function(){d=!0;b>=c&&a()}}}function Yd(a,b){var c,d=b.b,e=a.b;c=d>e?1:d<e?-1:0;var g;if(0!==c)g=c;else{var h=a.Kc,k=b.Kc;g=h>k?1:h<k?-1:0}return g}
function Zd(a,b){if(!Bd)return;var c=function(a){var d=b.X(qc[a])?"3":"4",g=yc(qc[a][P.Sb],b.X,[],qa);g&&g.length&&c(g[0].index);Pd(b.id,qc[a],d);var h=yc(qc[a][P.Tb],b.X,[],qa);h&&h.length&&c(h[0].index)};c(a);}var $d=!1;function Gc(){return function(){}};var ae=function(a,b){var c=wc(a,b),d;for(d in void 0)(void 0).hasOwnProperty(d)&&(c[d]=(void 0)[d]);qc.push(c);return qc.length-1};var be="allow_ad_personalization_signals cookie_domain cookie_expires cookie_name cookie_path custom_params event_callback event_timeout groups send_to send_page_view session_duration user_properties".split(" ");var ce=/[A-Z]+/,de=/\s/,ee=function(a){if(sa(a)&&(a=a.trim(),!de.test(a))){var b=a.indexOf("-");if(!(0>b)){var c=a.substring(0,b);if(ce.test(c)){for(var d=a.substring(b+1).split("/"),e=0;e<d.length;e++)if(!d[e])return;return{id:a,prefix:c,containerId:c+"-"+d[0],W:d}}}}};var fe=null,ge={},he={},ie;function je(){fe=fe||!Mc.gtagRegistered;Mc.gtagRegistered=!0;return fe}var ke=function(a,b){var c={event:a};b&&(c.eventModel=Qa(b),b.event_callback&&(c.eventCallback=b.event_callback),b.event_timeout&&(c.eventTimeout=b.event_timeout));return c};
function le(a){if(void 0===he[a.id]){var b;switch(a.prefix){case "UA":b=ae("gtagua",{trackingId:a.id});break;case "AW":b=ae("gtagaw",{conversionId:a});break;case "DC":b=ae("gtagfl",{targetId:a.id});break;case "GF":b=ae("gtaggf",{conversionId:a});break;case "G":b=ae("get",{trackingId:a.id,isAutoTag:!0});break;case "HA":b=ae("gtagha",{conversionId:a});break;default:return}if(!ie){var c=wc("v",{name:"send_to",dataLayerVersion:2});nc.push(c);ie=["macro",nc.length-1]}var d={arg0:ie,arg1:a.id,ignore_case:!1};
d[P.ca]="_lc";pc.push(d);var e={"if":[pc.length-1],add:[b]};e["if"]&&(e.add||e.block)&&oc.push(e);he[a.id]=b}}
var ne={event:function(a){var b=a[1];if(sa(b)&&!(3<a.length)){var c;if(2<a.length){if(!Pa(a[2]))return;c=a[2]}var d=ke(b,c);var e;var g=c,h=Wc("gtag.fields.send_to",2);sa(h)||(h="send_to");var k=g&&g[h];void 0===k&&(k=Wc(h,2),void 0===k&&(k="default"));if(sa(k)||ua(k)){for(var l,m=k.toString().replace(/\s+/g,"").split(","),n=[],p=0;p<m.length;p++)0<=m[p].indexOf("-")?n.push(m[p]):n=n.concat(ge[m[p]]||[]);l=n;for(var q={},r=0;r<l.length;++r){var u=ee(l[r]);u&&(q[u.id]=
u)}var t=[],A;for(A in q)if(q.hasOwnProperty(A)){var C=q[A];"AW"===C.prefix&&C.W[1]&&t.push(C.containerId)}for(var D=0;D<t.length;++D)delete q[t[D]];var L=[],E;for(E in q)q.hasOwnProperty(E)&&L.push(q[E]);e=L}else e=void 0;if(!e)return;var F=je();F||me();for(var J=[],H=0;F&&H<e.length;H++){var K=e[H];J.push(K.id);le(K)}d.eventModel=d.eventModel||{};0<e.length?d.eventModel.send_to=J.join():delete d.eventModel.send_to;return d}},set:function(a){var b;2==a.length&&Pa(a[1])?
b=Qa(a[1]):3==a.length&&sa(a[1])&&(b={},b[a[1]]=a[2]);if(b)return b.eventModel=Qa(b),b.event="gtag.set",b._clear=!0,b},js:function(a){if(2==a.length&&a[1].getTime)return{event:"gtm.js","gtm.start":a[1].getTime()}},config:function(a){var b=a[2]||{};if(2>a.length||!sa(a[1])||!Pa(b))return;var c=ee(a[1]);if(!c)return;je()?le(c):me();var d=c.id,e;for(e in ge)if(ge.hasOwnProperty(e)){var g=va(ge[e],d);0<=g&&ge[e].splice(g,1)}var h=c.id,k=b.groups||"default";k=k.toString().split(",");
for(var l=0;l<k.length;l++)ge[k[l]]=ge[k[l]]||[],ge[k[l]].push(h);delete b.groups;bd("gtag.targets."+c.id,void 0);bd("gtag.targets."+c.id,Qa(b));var m={};m.send_to=c.id;return ke("gtag.config",m);}},me=Ga(function(){});var oe=!1,pe=[];function qe(){if(!oe){oe=!0;for(var a=0;a<pe.length;a++)I(pe[a])}};var re=[],se=!1,te=function(a){var b=a.eventCallback,c=Ga(function(){ra(b)&&I(function(){b(Lc.w)})}),d=a.eventTimeout;d&&z.setTimeout(c,Number(d));return c},ue=function(){for(var a=!1;!se&&0<re.length;){se=!0;delete Uc.eventModel;var b=re.shift();if(ra(b))try{b.call(Xc)}catch(Ie){}else if(ua(b)){var c=b;if(sa(c[0])){var d=c[0].split("."),e=d.pop(),g=c.slice(1),h=Wc(d.join("."),2);if(void 0!==h&&null!==h)try{h[e].apply(h,g)}catch(Ie){}}}else{var k=b;if(k&&("[object Arguments]"==Object.prototype.toString.call(k)||
Object.prototype.hasOwnProperty.call(k,"callee"))){a:{if(b.length&&sa(b[0])){var l=ne[b[0]];if(l){b=l(b);break a}}b=void 0}if(!b){se=!1;continue}}var m;var n=void 0,p=b,q=p._clear;for(n in p)p.hasOwnProperty(n)&&"_clear"!==n&&(q&&bd(n,void 0),bd(n,p[n]));var r=p.event;if(r){var u=p["gtm.uniqueEventId"];u||(u=Sc(),p["gtm.uniqueEventId"]=u,bd("gtm.uniqueEventId",u));Oc=r;var t;var A,C,D=p,L=D.event,E=D["gtm.uniqueEventId"],F=Mc.zones;C=F?F.checkState(Lc.w,E):kd;if(C.active){var J=te(D);c:{var H=C.isWhitelisted;
if("gtm.js"==L){if($d){A=!1;break c}$d=!0}var K=E,R=L;if(Bd&&!Jd[K]&&Hd!==K){Od();Hd=K;Gd="";var ja=Id,W=K,ba,M=R;ba=0===M.indexOf("gtm.")?encodeURIComponent(M):"*";ja[W]="&e="+ba+"&eid="+K;Kd||(Kd=z.setTimeout(Od,500))}var T=gd(H),O={id:E,name:L,callback:J||qa,X:T,Ea:[]};O.Ea=Ic(T);for(var za,ab=O,Vb=Xd(ab.callback),Bc=[],pb=[],bb=0;bb<qc.length;bb++)if(ab.Ea[bb]){var Je=qc[bb];var Wb=Vb.add();try{var Ke=Qd(bb,Bc,Wb,Wb,Wb,ab);Ke?pb.push({Kc:bb,b:Ac(Je),L:Ke}):(Zd(bb,ab),Wb())}catch(Ie){Wb()}}Vb.bd();pb.sort(Yd);for(var vd=0;vd<pb.length;vd++)pb[vd].L();za=0<pb.length;if("gtm.js"===L||"gtm.sync"===L)d:{}if(za){for(var Mg={__cl:!0,__evl:!0,__fsl:!0,__hl:!0,__jel:!0,__lcl:!0,__sdl:!0,__tl:!0,__ytl:!0},Cc=0;Cc<O.Ea.length;Cc++)if(O.Ea[Cc]){var Me=qc[Cc];if(Me&&!Mg[Me[P.ca]]){A=!0;break c}}A=!1}else A=za}t=A?!0:!1}else t=!1;Oc=null;m=t}else m=!1;a=m||a}se=!1}return!a},ve=function(){var a=ue();try{var b=z["dataLayer"].hide;if(b&&void 0!==b[Lc.w]&&b.end){b[Lc.w]=!1;var c=!0,d;for(d in b)if(b.hasOwnProperty(d)&&
!0===b[d]){c=!1;break}c&&(b.end(),b.end=null)}}catch(e){}return a},we=function(){var a=gb("dataLayer",[]),b=gb("google_tag_manager",{});b=b["dataLayer"]=b["dataLayer"]||{};nd.push(function(){b.gtmDom||(b.gtmDom=!0,a.push({event:"gtm.dom"}))});pe.push(function(){b.gtmLoad||(b.gtmLoad=!0,a.push({event:"gtm.load"}))});var c=a.push;a.push=function(){var b=[].slice.call(arguments,0);c.apply(a,b);for(re.push.apply(re,b);300<this.length;)this.shift();return ue()};re.push.apply(re,a.slice(0));
I(ve)};var xe={};xe.Ja=new String("undefined");xe.fb={};var ye=function(a){this.resolve=function(b){for(var c=[],d=0;d<a.length;d++)c.push(a[d]===xe.Ja?b:a[d]);return c.join("")}};ye.prototype.toString=function(){return this.resolve("undefined")};ye.prototype.valueOf=ye.prototype.toString;xe.vd=function(a){return new ye(a)};var ze={};xe.Ce=function(a,b){var c=Sc();ze[c]=[a,b];return c};xe.Zb=function(a){var b=a?0:1;return function(a){var c=ze[a];if(c&&"function"===typeof c[b])c[b]();ze[a]=void 0}};
xe.be=function(a){for(var b=!1,c=!1,d=2;d<a.length;d++)b=b||8===a[d],c=c||16===a[d];return b&&c};xe.ue=function(a){if(a===xe.Ja)return a;var b=Sc();xe.fb[b]=a;return'google_tag_manager["'+Lc.w+'"].macro('+b+")"};xe.Rc=ye;var Ae=new Ea,Be=function(a,b){function c(a){var b=N(a),c=vb(b,"protocol"),d=vb(b,"host",!0),e=vb(b,"port"),g=vb(b,"path").toLowerCase().replace(/\/$/,"");if(void 0===c||"http"==c&&"80"==e||"https"==c&&"443"==e)c="web",e="default";return[c,d,e,g]}for(var d=c(String(a)),e=c(String(b)),g=0;g<d.length;g++)if(d[g]!==e[g])return!1;return!0};
function Ce(a){var b=a.arg0,c=a.arg1;switch(a["function"]){case "_cn":return 0<=String(b).indexOf(String(c));case "_css":var d;a:{if(b){var e=["matches","webkitMatchesSelector","mozMatchesSelector","msMatchesSelector","oMatchesSelector"];try{for(var g=0;g<e.length;g++)if(b[e[g]]){d=b[e[g]](c);break a}}catch(u){}}d=!1}return d;case "_ew":var h,k;h=String(b);k=String(c);var l=h.length-k.length;return 0<=l&&h.indexOf(k,l)==l;case "_eq":return String(b)==String(c);case "_ge":return Number(b)>=Number(c);
case "_gt":return Number(b)>Number(c);case "_lc":var m;m=String(b).split(",");return 0<=va(m,String(c));case "_le":return Number(b)<=Number(c);case "_lt":return Number(b)<Number(c);case "_re":var n;var p=a.ignore_case?"i":void 0;try{var q=String(c)+p,r=Ae.get(q);r||(r=new RegExp(c,p),Ae.set(q,r));n=r.test(b)}catch(u){n=!1}return n;case "_sw":return 0==String(b).indexOf(String(c));case "_um":return Be(b,c)}return!1};var De=function(){return!1};function Ee(a,b){return Sa(Wc(a,b||2))}function Fe(){return(new Date).getTime()}function Ge(a){return ya(Ra(a))}function He(a){return null===a?"null":void 0===a?"undefined":a.toString()}function Ne(a,b){return wa(a,b)}function Oe(a,b,c){if(!(a instanceof w))return null;for(var d=new La,e=!1,g=0;g<a.length();g++){var h=a.get(g);h instanceof La&&h.has(b)&&h.has(c)&&(d.set(h.get(b),h.get(c)),e=!0)}return e?d:null}
var Pe=function(){var a=new db,b=xb();De()&&(b.loadJavaScript=qa,b.loadIframe=qa);a.addAll({injectScript:b.injectScript,sendPixel:b.sendPixel,injectHiddenIframe:b.injectHiddenIframe,encodeUri:b.encodeURI,encodeUriComponent:b.encodeURIComponent,setInWindow:b.setInWindow,copyFromWindow:b.copyFromWindow,copyFromDataLayer:Ee,getUrl:b.getUrl,getDate:Fe,callLater:b.callLater,generateRandom:Ne,makeTableMap:Oe,makeString:He,makeInteger:Ge,logToConsole:b.logToConsole,queryPermission:b.queryPermission});return function(b){return a.get(b)}};var Qe,Te=function(){var a=data.runtime||[],b=data.permissions||{};Qe=new zb;mc=function(a,b){var c=new La,d;for(d in b)b.hasOwnProperty(d)&&c.set(d,Sa(b[d]));var e=Qe.L([a,c]);e instanceof v&&"return"===e.C&&(e=e.getData());return Ra(e)};tc=Ce;yb(Qe,Pe());for(var c=0;c<a.length;c++){var d=a[c];if(!ua(d)||3>d.length){if(0==d.length)continue;return}Qe.L(d)}var e=function(a){throw Re(a,{},"The requested permission is not configured.");};Qe.oa(e);for(var g in b)if(b.hasOwnProperty(g)){var h=b[g],k=!1,
l;for(l in h)h.hasOwnProperty(l)&&(k=!0,Qe.Pa(g,l,Se(l,h[l])));k||Qe.Pa(g,"default",e)}};function Se(a,b){var c=wc(a,b);c.vtp_permissionName=a;c.vtp_createPermissionError=Re;return xc(c)}function Re(a,b,c){return new id(a,b,c)};var Ue=function(a,b){var c=function(){};c.prototype=a.prototype;var d=new c;a.apply(d,Array.prototype.slice.call(arguments,1));return d};var Ve=function(a){return encodeURIComponent(a)},We=function(a,b){if(!a)return!1;var c=vb(N(a),"host");if(!c)return!1;for(var d=0;b&&d<b.length;d++){var e=b[d]&&b[d].toLowerCase();if(e){var g=c.length-e.length;0<g&&"."!=e.charAt(0)&&(g--,e="."+e);if(0<=g&&c.indexOf(e,g)==g)return!0}}return!1};
var Q=function(a,b,c){for(var d={},e=!1,g=0;a&&g<a.length;g++)a[g]&&a[g].hasOwnProperty(b)&&a[g].hasOwnProperty(c)&&(d[a[g][b]]=a[g][c],e=!0);return e?d:null},Xe=function(a,b){Qa(a,b)},Ye=function(a){return ya(a)},Ze=function(a,b){return va(a,b)},$e=function(){return!1};var af=function(a){var b={"gtm.element":a,"gtm.elementClasses":a.className,"gtm.elementId":a["for"]||mb(a,"id")||"","gtm.elementTarget":a.formTarget||a.target||""};b["gtm.elementUrl"]=(a.attributes&&a.attributes.formaction?a.formAction:"")||a.action||a.href||a.src||a.code||a.codebase||"";return b},bf=function(a){Mc.hasOwnProperty("autoEventsSettings")||(Mc.autoEventsSettings={});var b=Mc.autoEventsSettings;b.hasOwnProperty(a)||(b[a]={});return b[a]},cf=function(a,b,c,d){var e=bf(a),g=Fa(e,b,d);e[b]=
c(g)},df=function(a,b,c){var d=bf(a);return Fa(d,b,c)};var ff=function(a,b){if(!ef)return null;if(Element.prototype.closest)try{return a.closest(b)}catch(e){return null}var c=Element.prototype.matches||Element.prototype.webkitMatchesSelector||Element.prototype.mozMatchesSelector||Element.prototype.msMatchesSelector||Element.prototype.oMatchesSelector,d=a;if(!B.documentElement.contains(d))return null;do{try{if(c.call(d,b))return d}catch(e){break}d=d.parentElement||d.parentNode}while(null!==d&&1===d.nodeType);return null},gf=!1;
if(B.querySelectorAll)try{var hf=B.querySelectorAll(":root");hf&&1==hf.length&&hf[0]==B.documentElement&&(gf=!0)}catch(a){}var ef=gf;var jf=function(a,b,c){for(var d=[],e=String(b||document.cookie).split(";"),g=0;g<e.length;g++){var h=e[g].split("="),k=h[0].replace(/^\s*|\s*$/g,"");if(k&&k==a){var l=h.slice(1).join("=").replace(/^\s*|\s*$/g,"");l&&c&&(l=decodeURIComponent(l));d.push(l)}}return d},mf=function(a,b,c,d){var e=kf(a,d);if(1===e.length)return e[0].id;if(0!==e.length){e=lf(e,function(a){return a.Dd},b);if(1===e.length)return e[0].id;e=lf(e,function(a){return a.se},c);return e[0]?e[0].id:void 0}},pf=function(a,b,c,d,e,
g){c=c||"/";var h=d=d||"auto",k=c;if(nf.test(document.location.hostname)||"/"===k&&of.test(h))return!1;g&&(b=encodeURIComponent(b));var l=b;l&&1200<l.length&&(l=l.substring(0,1200));b=l;var m=a+"="+b+"; path="+c+"; ";void 0!==e&&(m+="expires="+e.toUTCString()+"; ");if("auto"===d){var n=!1,p;a:{var q=[],r=document.location.hostname.split(".");if(4===r.length){var u=r[r.length-1];if(parseInt(u,10).toString()===u){p=["none"];break a}}for(var t=r.length-2;0<=t;t--)q.push(r.slice(t).join("."));q.push("none");
p=q}for(var A=p,C=0;C<A.length&&!n;C++)n=pf(a,b,c,A[C],e);return n}d&&"none"!==d&&(m+="domain="+d+";");var D=document.cookie;document.cookie=m;return D!=document.cookie||0<=jf(a).indexOf(b)};function lf(a,b,c){for(var d=[],e=[],g,h=0;h<a.length;h++){var k=a[h],l=b(k);l===c?d.push(k):void 0===g||l<g?(e=[k],g=l):l===g&&e.push(k)}return 0<d.length?d:e}
function kf(a,b){for(var c=[],d=jf(a),e=0;e<d.length;e++){var g=d[e].split("."),h=g.shift();if(!b||-1!==b.indexOf(h)){var k=g.shift();k&&(k=k.split("-"),c.push({id:g.join("."),Dd:1*k[0]||1,se:1*k[1]||1}))}}return c}var of=/^(www\.)?google(\.com?)?(\.[a-z]{2})?$/,nf=/(^|\.)doubleclick\.net$/i;var qf=function(){for(var a=eb.userAgent+(B.cookie||"")+(B.referrer||""),b=a.length,c=z.history.length;0<c;)a+=c--^b++;var d=1,e,g,h;if(a)for(d=0,g=a.length-1;0<=g;g--)h=a.charCodeAt(g),d=(d<<6&268435455)+h+(h<<14),e=d&266338304,d=0!=e?d^e>>21:d;return[Math.round(2147483647*Math.random())^d&2147483647,Math.round(Da()/1E3)].join(".")},tf=function(a,b,c,d){var e=rf(b);return mf(a,e,sf(c),d)};function rf(a){if(!a)return 1;a=0===a.indexOf(".")?a.substr(1):a;return a.split(".").length}
function sf(a){if(!a||"/"===a)return 1;"/"!==a[0]&&(a="/"+a);"/"!==a[a.length-1]&&(a+="/");return a.split("/").length-1}function uf(a,b){var c=""+rf(a),d=sf(b);1<d&&(c+="-"+d);return c};var vf=["1"],wf={},Af=function(a,b,c){var d=xf(a);wf[d]||yf(d,b,c)||(zf(d,qf(),b,c),yf(d,b,c))};function zf(a,b,c,d){var e;e=["1",uf(c,d),b].join(".");pf(a,e,d,c,new Date(Da()+7776E6))}function yf(a,b,c){var d=tf(a,b,c,vf);d&&(wf[a]=d);return d}function xf(a){return(a||"_gcl")+"_au"};function Bf(){for(var a=Cf,b={},c=0;c<a.length;++c)b[a[c]]=c;return b}function Df(){var a="ABCDEFGHIJKLMNOPQRSTUVWXYZ";a+=a.toLowerCase()+"0123456789-_";return a+"."}
var Cf,Ef,Ff=function(a){Cf=Cf||Df();Ef=Ef||Bf();for(var b=[],c=0;c<a.length;c+=3){var d=c+1<a.length,e=c+2<a.length,g=a.charCodeAt(c),h=d?a.charCodeAt(c+1):0,k=e?a.charCodeAt(c+2):0,l=g>>2,m=(g&3)<<4|h>>4,n=(h&15)<<2|k>>6,p=k&63;e||(p=64,d||(n=64));b.push(Cf[l],Cf[m],Cf[n],Cf[p])}return b.join("")},Gf=function(a){function b(b){for(;d<a.length;){var c=a.charAt(d++),e=Ef[c];if(null!=e)return e;if(!/^[\s\xa0]*$/.test(c))throw Error("Unknown base64 encoding at char: "+c);}return b}Cf=Cf||Df();Ef=Ef||
Bf();for(var c="",d=0;;){var e=b(-1),g=b(0),h=b(64),k=b(64);if(64===k&&-1===e)return c;c+=String.fromCharCode(e<<2|g>>4);64!=h&&(c+=String.fromCharCode(g<<4&240|h>>2),64!=k&&(c+=String.fromCharCode(h<<6&192|k)))}};var Hf;function If(a,b){if(!a||b===B.location.hostname)return!1;for(var c=0;c<a.length;c++)if(a[c]instanceof RegExp){if(a[c].test(b))return!0}else if(0<=b.indexOf(a[c]))return!0;return!1}var Jf=function(){var a=gb("google_tag_data",{}),b=a.gl;b&&b.decorators||(b={decorators:[]},a.gl=b);return b};var Kf=/(.*?)\*(.*?)\*(.*)/,Lf=/([^?#]+)(\?[^#]*)?(#.*)?/,Mf=/(.*?)(^|&)_gl=([^&]*)&?(.*)/,Of=function(a){var b=[],c;for(c in a)if(a.hasOwnProperty(c)){var d=a[c];void 0!==d&&d===d&&null!==d&&"[object Object]"!==d.toString()&&(b.push(c),b.push(Ff(String(d))))}var e=b.join("*");return["1",Nf(e),e].join("*")},Nf=function(a,b){var c=[window.navigator.userAgent,(new Date).getTimezoneOffset(),window.navigator.userLanguage||window.navigator.language,Math.floor((new Date).getTime()/60/1E3)-(void 0===b?0:
b),a].join("*"),d;if(!(d=Hf)){for(var e=Array(256),g=0;256>g;g++){for(var h=g,k=0;8>k;k++)h=h&1?h>>>1^3988292384:h>>>1;e[g]=h}d=e}Hf=d;for(var l=4294967295,m=0;m<c.length;m++)l=l>>>8^Hf[(l^c.charCodeAt(m))&255];return((l^-1)>>>0).toString(36)},Qf=function(){return function(a){var b=N(z.location.href),c=b.search.replace("?",""),d=ub(c,"_gl",!0)||"";a.query=Pf(d)||{};var e=vb(b,"fragment").match(Mf);a.fragment=Pf(e&&e[3]||"")||{}}},Pf=function(a){var b;b=void 0===b?3:b;try{if(a){var c=Kf.exec(a);if(c&&
"1"===c[1]){var d=c[3],e;a:{for(var g=c[2],h=0;h<b;++h)if(g===Nf(d,h)){e=!0;break a}e=!1}if(e){for(var k={},l=d?d.split("*"):[],m=0;m<l.length;m+=2)k[l[m]]=Gf(l[m+1]);return k}}}}catch(n){}};
function Rf(a,b,c){function d(a){var b=a,c=Mf.exec(b),d=b;if(c){var e=c[2],g=c[4];d=c[1];g&&(d=d+e+g)}a=d;var h=a.charAt(a.length-1);a&&"&"!==h&&(a+="&");return a+l}c=void 0===c?!1:c;var e=Lf.exec(b);if(!e)return"";var g=e[1],h=e[2]||"",k=e[3]||"",l="_gl="+a;c?k="#"+d(k.substring(1)):h="?"+d(h.substring(1));return""+g+h+k}
function Sf(a,b,c){for(var d={},e={},g=Jf().decorators,h=0;h<g.length;++h){var k=g[h];(!c||k.forms)&&If(k.domains,b)&&(k.fragment?Ha(e,k.callback()):Ha(d,k.callback()))}if(Ia(d)){var l=Of(d);if(c){if(a&&a.action){var m=(a.method||"").toLowerCase();if("get"===m){for(var n=a.childNodes||[],p=!1,q=0;q<n.length;q++){var r=n[q];if("_gl"===r.name){r.setAttribute("value",l);p=!0;break}}if(!p){var u=B.createElement("input");u.setAttribute("type","hidden");u.setAttribute("name","_gl");u.setAttribute("value",
l);a.appendChild(u)}}else if("post"===m){var t=Rf(l,a.action);sb.test(t)&&(a.action=t)}}}else Tf(l,a,!1)}if(!c&&Ia(e)){var A=Of(e);Tf(A,a,!0)}}function Tf(a,b,c){if(b.href){var d=Rf(a,b.href,void 0===c?!1:c);sb.test(d)&&(b.href=d)}}
var Uf=function(a){try{var b;a:{for(var c=a.target||a.srcElement||{},d=100;c&&0<d;){if(c.href&&c.nodeName.match(/^a(?:rea)?$/i)){b=c;break a}c=c.parentNode;d--}b=null}var e=b;if(e){var g=e.protocol;"http:"!==g&&"https:"!==g||Sf(e,e.hostname,!1)}}catch(h){}},Vf=function(a){try{var b=a.target||a.srcElement||{};if(b.action){var c=vb(N(b.action),"host");Sf(b,c,!0)}}catch(d){}},Wf=function(a,b,c,d){var e=Jf();e.init||(kb(B,"mousedown",Uf),kb(B,"keyup",Uf),kb(B,"submit",Vf),e.init=!0);var g={callback:a,
domains:b,fragment:"fragment"===c,forms:!!d};Jf().decorators.push(g)};var Xf=/^\w+$/,Yf=/^[\w-]+$/,Zf=/^~?[\w-]+$/,$f={aw:"_aw",dc:"_dc",gf:"_gf",ha:"_ha"},bg=function(a){var b=jf(a,B.cookie),c=[];if(!b||0==b.length)return c;for(var d=0;d<b.length;d++){var e=b[d].split(".");3==e.length&&"GCL"==e[0]&&e[1]&&c.push(e[2])}return ag(c)};function cg(a){return a&&"string"==typeof a&&a.match(Xf)?a:"_gcl"}
var dg=function(a){if(a){if("string"==typeof a){var b=cg(a);return{dc:b,aw:b,gf:b,ha:b}}if(a&&"object"==typeof a)return{dc:cg(a.dc),aw:cg(a.aw),gf:cg(a.gf),ha:cg(a.ha)}}return{dc:"_gcl",aw:"_gcl",gf:"_gcl",ha:"_gcl"}},eg=function(){var a=N(z.location.href),b={},c=function(a,c){b[c]||(b[c]=[]);b[c].push(a)},d=vb(a,"query",!1,void 0,"gclid"),e=vb(a,"query",!1,void 0,"gclsrc");if(!d||!e){var g=a.hash.replace("#","");d=d||ub(g,"gclid");e=e||ub(g,"gclsrc")}if(void 0!==d&&d.match(Yf))switch(e){case void 0:c(d,
"aw");break;case "aw.ds":c(d,"aw");c(d,"dc");break;case "ds":c(d,"dc");break;case "gf":c(d,"gf");break;case "ha":c(d,"ha")}var h=vb(a,"query",!1,void 0,"dclid");h&&c(h,"dc");return b},gg=function(a){function b(a,b){var g=fg(a,c);g&&pf(g,b,e,d,h,!0)}a=a||{};var c=dg(a.prefix),d=a.domain||"auto",e=a.path||"/",g=Da(),h=new Date(g+7776E6),k=Math.round(g/1E3),l=eg(),m=function(a){return["GCL",k,a].join(".")};l.aw&&(!0===a.xf?b("aw",m("~"+l.aw[0])):b("aw",m(l.aw[0])));l.dc&&b("dc",m(l.dc[0]));l.gf&&b("gf",
m(l.gf[0]));l.ha&&b("ha",m(l.ha[0]))},fg=function(a,b){var c=$f[a];if(void 0!==c){var d=b[a];if(void 0!==d)return d+c}},hg=function(a){var b=a.split(".");return 3!==b.length||"GCL"!==b[0]?0:1E3*(Number(b[1])||0)},ig=function(a,b,c,d,e){if(ua(b)){var g=dg(e);Wf(function(){for(var b={},c=0;c<a.length;++c){var d=fg(a[c],g);if(d){var e=jf(d,B.cookie);e.length&&(b[d]=e.sort()[e.length-1])}}return b},b,c,d)}},ag=function(a){return a.filter(function(a){return Zf.test(a)})};var jg=/^\d+\.fls\.doubleclick\.net$/;function kg(a){var b=N(z.location.href),c=vb(b,"host",!1);if(c&&c.match(jg)){var d=vb(b,"path").split(a+"=");if(1<d.length)return d[1].split(";")[0].split("?")[0]}}
var lg=function(a){var b=kg("gclaw");if(b)return b.split(".");var c=dg(a);if("_gcl"==c.aw){var d=eg().aw||[];if(0<d.length)return d}var e=fg("aw",c);return e?bg(e):[]},mg=function(a){var b=kg("gcldc");if(b)return b.split(".");var c=dg(a);if("_gcl"==c.dc){var d=eg().dc||[];if(0<d.length)return d}var e=fg("dc",c);return e?bg(e):[]},ng=function(a){var b=dg(a);if("_gcl"==b.ha){var c=eg().ha||[];if(0<c.length)return c}return bg(b.ha+"_ha")},og=function(){var a=kg("gac");if(a)return decodeURIComponent(a);
for(var b=[],c=B.cookie.split(";"),d=/^\s*_gac_(UA-\d+-\d+)=\s*(.+?)\s*$/,e=0;e<c.length;e++){var g=c[e].match(d);g&&b.push({Db:g[1],value:g[2]})}var h={};if(b&&b.length)for(var k=0;k<b.length;k++){var l=b[k].value.split(".");"1"==l[0]&&3==l.length&&l[1]&&(h[b[k].Db]||(h[b[k].Db]=[]),h[b[k].Db].push({timestamp:l[1],Kd:l[2]}))}var m=[],n;for(n in h)if(h.hasOwnProperty(n)){for(var p=[],q=h[n],r=0;r<q.length;r++)p.push(q[r].Kd);p=ag(p);p.length&&m.push(n+":"+p.join(","))}return m.join(";")},pg=function(a,
b,c){Af(a,b,c);var d=wf[xf(a)],e=eg().dc||[];if(d&&0<e.length){var g=Mc.joined_au=Mc.joined_au||{},h=a||"_gcl";if(!g[h]){for(var k=!1,l=0;l<e.length;l++){var m="https://adservice.google.com/ddm/regclk";m+="?gclid="+e[l]+"&auiddc="+d;rb(m);k=!0}if(k){var n=xf(a);wf[n]&&zf(n,wf[n],b,c);g[h]=!0}}}};var rg={"":"n",UA:"u",AW:"a",DC:"d",G:"e",GF:"f",HA:"h",GTM:qg()};function qg(){if(3===Lc.Na.length)return"g";return"g";return"G"}var sg=function(a){var b=Lc.w.split("-"),c=b[0].toUpperCase(),d=rg[c]||"i",e=a&&"GTM"===c?b[1]:"",g;if(3===Lc.Na.length){var h;a:{h=De()?"s":"o";break a;h="w"}g="2"+h}else g="";return g+d+Lc.Na+e};
var tg=function(a){return!(void 0===a||null===a||0===(a+"").length)},ug=function(a,b){var c;if(2===b.J)return a("ord",wa(1E11,1E13)),!0;if(3===b.J)return a("ord","1"),a("num",wa(1E11,1E13)),!0;if(4===b.J)return tg(b.sessionId)&&a("ord",b.sessionId),!0;if(5===b.J)c="1";else if(6===b.J)c=b.Gc;else return!1;tg(c)&&a("qty",c);tg(b.jb)&&a("cost",b.jb);tg(b.Eb)&&a("ord",b.Eb);return!0},vg=encodeURIComponent,wg=function(a,b){function c(a,b,c){g.hasOwnProperty(a)||(b+="",e+=";"+a+"="+(c?b:vg(b)))}var d=a.lb,
e=a.protocol;e+=a.$a?"//"+d+".fls.doubleclick.net/activityi":"//ad.doubleclick.net/activity";e+=";src="+vg(d)+(";type="+vg(a.mb))+(";cat="+vg(a.va));var g=a.xd||{},h;for(h in g)g.hasOwnProperty(h)&&(e+=";"+vg(h)+"="+vg(g[h]+""));if(ug(c,a)){tg(a.Gb)&&c("u",a.Gb);tg(a.tran)&&c("tran",a.tran);c("gtm",sg());!1===a.Zc&&c("npa","1");if(a.ib){var k=mg(a.ia);k&&k.length&&c("gcldc",k.join("."));var l=lg(a.ia);l&&l.length&&c("gclaw",l.join("."));var m=og();m&&c("gac",m);
Af(a.ia);var n=wf[xf(a.ia)];n&&c("auiddc",n);}tg(a.vb)&&c("prd",a.vb,!0);for(var p in a.Ga)a.Ga.hasOwnProperty(p)&&c(p,a.Ga[p]);e+=b||"";tg(a.Wa)&&c("~oref",a.Wa);a.$a?jb(e+"?",a.S):G(e+"?",a.S,a.la)}else I(a.la)};
var yg=function(a){if(a)try{if(a.conversion_id&&a.conversion_data){var b="/pagead/conversion/"+xg(a.conversion_id)+"/?",c=xg(JSON.stringify(a.conversion_data)),d="https://www.googletraveladservices.com/travel/flights/clk"+b+"conversion_data="+c;if(a.conversionLinkerEnabled){var e;a:{var g=dg(a.conversionPrefix);if("_gcl"==g.gf){var h=eg().gf||[];if(0<h.length){e=h;break a}}var k=fg("gf",g);e=k?bg(k):[]}var l=e;if(l&&l.length)for(var m=0;m<l.length;m++)d+="&gclgf="+xg(l[m])}G(d,a.onSuccess,a.onFailure)}}catch(n){}},
xg=function(a){return null===a||void 0===a||0===String(a).length?"":encodeURIComponent(String(a))};var zg=!!z.MutationObserver,Ag=void 0,Bg=function(a){if(!Ag){var b=function(){var a=B.body;if(a)if(zg)(new MutationObserver(function(){for(var a=0;a<Ag.length;a++)I(Ag[a])})).observe(a,{childList:!0,subtree:!0});else{var b=!1;kb(a,"DOMNodeInserted",function(){b||(b=!0,I(function(){b=!1;for(var a=0;a<Ag.length;a++)I(Ag[a])}))})}};Ag=[];B.body?b():I(b)}Ag.push(a)};var Cg=/\./g,Dg=/\*/g;var Ug="www.googletagmanager.com/gtm.js";Ug="www.googletagmanager.com/gtag/js";
var Vg=Ug,Wg=function(a,b,c,d){kb(a,b,c,d)},Xg=function(a,b){return z.setTimeout(a,b)},S=function(a,b,c){if(De()){b&&I(b)}else return ib(a,b,c)},Yg=function(){return z.location.href},Zg=function(a){return vb(N(a),"fragment")},$g=function(a,b,c,d,e){return vb(a,b,c,d,e)},U=function(a,b){return Wc(a,b||2)},ah=function(a,b,c){b&&(a.eventCallback=b,c&&(a.eventTimeout=c));return z["dataLayer"].push(a)},bh=function(a,
b){z[a]=b},V=function(a,b,c){b&&(void 0===z[a]||c&&!z[a])&&(z[a]=b);return z[a]},ch=function(a,b,c){return jf(a,b,void 0===c?!0:!!c)},dh=function(a,b,c){gg({prefix:a,path:b,domain:c})},eh=function(a,b,c,d){var e=Qf(),g=Jf();g.data||(g.data={query:{},fragment:{}},e(g.data));var h={},k=g.data;k&&(Ha(h,k.query),Ha(h,k.fragment));for(var l=dg(b),m=0;m<a.length;++m){var n=a[m];if(void 0!==$f[n]){var p=fg(n,l),q=h[p];if(q){var r=Math.min(hg(q),Da()),u;b:{for(var t=
r,A=jf(p,B.cookie),C=0;C<A.length;++C)if(hg(A[C])>t){u=!0;break b}u=!1}u||pf(p,q,c,d,new Date(r+7776E6),!0)}}}},fh=function(a,b,c,d,e){ig(a,b,c,d,e);},gh=function(a,b,c){var d;a:{var e;e=c||100;for(var g={},h=0;h<b.length;h++)g[b[h]]=!0;for(var k=a,l=0;k&&l<=e;l++){if(g[String(k.tagName).toLowerCase()]){d=k;break a}k=k.parentElement}d=null}return d},X=function(a,b,c,d){var e= | ua(a))for(var d=0;d<a.length;d++)if(a[d]&&b(a[d])){c=a[d];break a}c=void 0}return c},nh=function(a,b,c,d){cf(a,b,c,d)},oh=function(a,b,c){return df(a,b,c)},ph=function(a){return!!df(a,"init",!1)},qh=function(a){bf(a).init=!0};
var hh=function(){var a=Vg;if(fb){var b=fb.toLowerCase();if(0===b.indexOf("https://"))return 2;if(0===b.indexOf("http://"))return 3}a=a.toLowerCase();for(var c="https://"+a,d="http://"+a,e=1,g=B.getElementsByTagName("script"),h=0;h<g.length&&100>h;h++){var k=g[h].src;if(k){k=k.toLowerCase();if(0===k.indexOf(d))return 3;1===e&&0===k.indexOf(c)&&(e=2)}}return e};
var sh=function(a,b){return Yc(a,b,void 0)},th=function(a,b,c,d){var e={config:a,gtm:sg(void 0)};c&&(Af(d),e.auiddc=wf[xf(d)]);b&&(e.loadInsecure=b);V("__dc_ns_processor",[]).push(e);S((b?"http":"https")+"://www.googletagmanager.com/dclk/ns/v1.js")};
var uh=function(a,b,c){var d=(void 0===c?0:c)?"www.googletagmanager.com/gtag/js":Vg;d+="?id="+encodeURIComponent(a)+"&l=dataLayer";if(b)for(var e in b)b[e]&&b.hasOwnProperty(e)&&(d+="&"+e+"="+encodeURIComponent(b[e]));S(X("https://","http://",d))};
var wh=function(a,b,c){a instanceof xe.Rc&&(a=a.resolve(xe.Ce(b,c)),b=qa);return{nb:a,S:b}};var Jh=function(a,b,c){this.n=a;this.t=b;this.p=c},Kh=function(){this.c=1;this.e=[];this.p=null};function Lh(a){var b=Mc,c=b.gss=b.gss||{};return c[a]=c[a]||new Kh}var Mh=function(a,b){Lh(a).p=b},Nh=function(a,b,c){var d=Math.floor(Da()/1E3);Lh(a).e.push(new Jh(b,d,c))},Oh=function(a){};var Xh=window,Yh=document,Zh=function(a){var b=Xh._gaUserPrefs;if(b&&b.ioo&&b.ioo()||a&&!0===Xh["ga-disable-"+a])return!0;try{var c=Xh.external;if(c&&c._gaUserPrefs&&"oo"==c._gaUserPrefs)return!0}catch(g){}for(var d=jf("AMP_TOKEN",Yh.cookie,!0),e=0;e<d.length;e++)if("$OPT_OUT"==d[e])return!0;return!1};var di=function(a){if(1===Lh(a).c){Lh(a).c=2;var b=encodeURIComponent(a);ib(("http:"!=z.location.protocol?"https:":"http:")+("//www.googletagmanager.com/gtag/js?id="+b+"&l=dataLayer&cx=c"))}},ei=function(a,b){};var Z={a:{}};
Z.a.gtagha=["google"],function(){function a(a){function b(a,b){void 0!==b&&c.push(a+"="+b)}if(void 0===a)return"";var c=[];b("hct_base_price",a.jc);b("hct_booking_xref",a.kc);b("hct_checkin_date",a.Rd);b("hct_checkout_date",a.Sd);b("hct_currency_code",a.Td);b("hct_partner_hotel_id",a.mc);b("hct_total_price",a.nc);return c.join(";")}function b(b,c,d,k){var e=encodeURIComponent(b),g=encodeURIComponent(a(c)),h="https://www.googletraveladservices.com/travel/clk/pagead/conversion/"+e+"/?data="+g;d&&(h+=
ng(k).map(function(a){return"&gclha="+encodeURIComponent(a)}).join(""));return h}function c(a,b,c,d){var e={};sa(a)?e.kc=a:"number"===typeof a&&(e.kc=String(a));sa(c)&&(e.Td=c);sa(b)?e.nc=e.jc=b:"number"===typeof b&&(e.nc=e.jc=String(b));if(!ua(d)||0==d.length)return e;var g=d[0];if(!Pa(g))return e;sa(g.id)?e.mc=g.id:"number"===typeof g.id&&(e.mc=String(g.id));sa(g.start_date)&&(e.Rd=g.start_date);sa(g.end_date)&&(e.Sd=g.end_date);return e}function d(a){var b=Oc,e=a.vtp_gtmOnSuccess,k=a.vtp_gtmOnFailure,
l=a.vtp_conversionId,m=l.containerId,n=function(a){return Yc(a,m,l.id)},p=!1!==n("conversion_linker"),q=n("conversion_cookie_prefix");if("gtag.config"===b)p&&dh(q),I(e);else if("purchase"===b){var r=c(n("transaction_id"),n("value"),n("currency"),n("items"));d.Pe(l.W[0],r,p,q,e,k)}else I(k)}d.Pe=function(a,c,d,k,l,m){if(/^\d+$/.test(a)){var e=b(a,c,d,k);G(e,l,m)}else I(m)};Z.__gtagha=d;Z.__gtagha.g="gtagha";Z.__gtagha.h=!0;Z.__gtagha.b=0}();Z.a.e=["google"],function(){(function(a){Z.__e=a;Z.__e.g="e";Z.__e.h=!0;Z.__e.b=0})(function(){return Oc})}();
Z.a.v=["google"],function(){(function(a){Z.__v=a;Z.__v.g="v";Z.__v.h=!0;Z.__v.b=0})(function(a){var b=a.vtp_name;if(!b||!b.replace)return!1;var c=U(b.replace(/\\\./g,"."),a.vtp_dataLayerVersion||1);return void 0!==c?c:a.vtp_defaultValue})}();
Z.a.gtagaw=["google"],function(){var a=!1,b=!1,c=[],d=["aw","dc"],e="send_to aw_remarketing aw_remarketing_only custom_params send_page_view language value currency transaction_id user_id conversion_linker conversion_cookie_prefix page_location page_referrer phone_conversion_number phone_conversion_callback phone_conversion_css_class items aw_merchant_id aw_feed_country aw_feed_language discount disable_merchant_reported_purchases allow_ad_personalization_signals".split(" "),g=function(a){var b=V("google_trackConversion"),
c=a.gtm_onFailure;"function"==typeof b?b(a)||c():c()},h=function(){for(;0<c.length;)g(c.shift())},k=function(){a||(a=!0,S(X("https://","http://","www.googleadservices.com/pagead/conversion_async.js"),function(){h();c={push:g}},function(){h();a=!1}))},l=function(a,c,d,e){if(De()){}else if(c){var g=a.W[0],h=a.W[1],k=V("_googWcmImpl",function(){k.q=k.q||[];k.q.push(arguments)});V("_googWcmAk",g);b||(b=!0,S(X("https://",
"http://","www.gstatic.com/wcm/loader.js")));var l={ak:g,cl:h};void 0===d&&(l.autoreplace=c);k(2,d,l,c,e,new Date,e)}},m=function(a){if(a){for(var b=[],c=0;c<a.length;++c){var d=a[c];d&&b.push({item_id:d.id,quantity:d.quantity,value:d.price,start_date:d.start_date,end_date:d.end_date})}return b}},n=function(a){var b=a.vtp_conversionId,g=Oc,h="gtag.config"==g,n=b.W[0],p=b.W[1],C=void 0!==p,D=b.containerId,L=C?b.id:void 0,E=function(a){return Yc(a,D,L)},F=!1!==E("conversion_linker"),J=E("conversion_cookie_prefix");
if(h){var H=E("linker")||{};F&&((H.accept_incoming||!1!==H.accept_incoming&&H.domains)&&eh(d,J),dh(J));H.domains&&fh(d,H.domains,H.url_position,!!H.decorate_forms,J);if(C){var K=E("phone_conversion_number"),R=E("phone_conversion_callback"),ja=E("phone_conversion_css_class"),W=E("phone_conversion_options");l(b,K,R||ja,W)}}var ba=!1===E("aw_remarketing")||!1===E("send_page_view");if(!h||!C&&!ba)if(!0===E("aw_remarketing_only")&&(C=!1),!1!==E("allow_ad_personalization_signals")||C){var M={google_conversion_id:n,
google_remarketing_only:!C,onload_callback:a.vtp_gtmOnSuccess,gtm_onFailure:a.vtp_gtmOnFailure,google_conversion_format:"3",google_conversion_color:"ffffff",google_conversion_domain:"",google_conversion_label:p,google_conversion_language:E("language"),google_conversion_value:E("value"),google_conversion_currency:E("currency"),google_conversion_order_id:E("transaction_id"),google_user_id:E("user_id"),google_conversion_page_url:E("page_location"),google_conversion_referrer_url:E("page_referrer"),google_gtm:sg(void 0)};
!1===E("allow_ad_personalization_signals")&&(M.google_allow_ad_personalization_signals=!1);M.google_read_gcl_cookie_opt_out=!F;F&&J&&(Pa(J)?M.google_gcl_cookie_prefix=J.aw:M.google_gcl_cookie_prefix=J);var T=function(){var a=E("custom_params"),b={event:g};if(ua(a)){for(var c=0;c<a.length;++c){var d=a[c],h=E(d);void 0!==h&&(b[d]=h)}return b}var k=E("eventModel");if(!k)return null;Qa(k,b);for(var l=0;l<e.length;++l)delete b[e[l]];return b}();T&&(M.google_custom_params=T);!C&&E("items")&&(M.google_gtag_event_data=
{items:E("items"),value:E("value")});if(C&&"purchase"==g){E("aw_merchant_id")&&(M.google_conversion_merchant_id=E("aw_merchant_id"),M.google_basket_feed_country=E("aw_feed_country"),M.google_basket_feed_language=E("aw_feed_language"),M.google_basket_discount=E("discount"),M.google_basket_transaction_type=g,M.google_disable_merchant_reported_conversions=!0===E("disable_merchant_reported_purchases"),De()&&(M.google_disable_merchant_reported_conversions=!0));var O=m(E("items"));O&&(M.google_conversion_items=
O)}c.push(M)}k()};Z.__gtagaw=n;Z.__gtagaw.g="gtagaw";Z.__gtagaw.h=!0;Z.__gtagaw.b=0}();
Z.a.get=["google"],function(){(function(a){Z.__get=a;Z.__get.g="get";Z.__get.h=!0;Z.__get.b=0})(function(a){if(a.vtp_isAutoTag){for(var b=String(a.vtp_trackingId),c=Oc||"",d={},e=0;e<be.length;e++){var g=sh(be[e],b);void 0!==g&&(d[be[e]]=g)}var h=sh("custom_params",b);if(ua(h))for(var k=0;k<h.length;k++){var l=h[k],m=sh(l,b);void 0!==m&&(d[l]=m)}else{var n=U("eventModel");Qa(n,d)}var p=Qa(d,void 0);di(b);Nh(b,c,p);Oh(b)}else{var q=a.vtp_settings,r=q.eventParameters,u=q.userProperties,t=Q(a.vtp_eventParameters,
"name","value");Qa(t,r);var A=Q(a.vtp_userProperties,"name","value");Qa(A,u);r.user_properties=u;var C=String(q.streamId),D=String(a.vtp_eventName);di(C);Nh(C,D,r);Oh(C)}a.vtp_gtmOnSuccess()})}();
Z.a.gtagfl=[],function(){function a(a){var b=/^DC-(\d+)(\/([\w-]+)\/([\w-]+)\+(\w+))?$/.exec(a);if(b){var c={standard:2,unique:3,per_session:4,transactions:5,items_sold:6,"":1}[(b[5]||"").toLowerCase()];if(c)return{containerId:"DC-"+b[1],Lc:b[3]?a:"",Uc:b[1],Tc:b[3]||"",va:b[4]||"",J:c}}}function b(a,b){function c(b,c,e){void 0!==e&&0!==(e+"").length&&d.push(b+c+":"+a(e+""))}var d=[],e=b("items")||[];if(ua(e))for(var g=0;g<e.length;g++){var n=e[g],p=g+1;c("i",p,n.id);c("p",p,n.price);c("q",p,n.quantity);
c("c",p,b("country"));c("l",p,b("language"))}return d.join("|")}function c(a,b,c){var d=/^u([1-9]\d?|100)$/,e=a("custom_map")||{},g=ad(b,c),h={},p={};if(Pa(e))for(var q in e)if(e.hasOwnProperty(q)&&d.test(q)){var r=e[q];sa(r)&&(h[q]=r)}for(var u=0;u<g.length;u++){var t=g[u];d.test(t)&&(h[t]=t)}for(var A in h)h.hasOwnProperty(A)&&(p[A]=a(h[A]));return p}var d=["aw","dc"];(function(a){Z.__gtagfl=a;Z.__gtagfl.g="gtagfl";Z.__gtagfl.h=!0;Z.__gtagfl.b=0})(function(e){var g=e.vtp_gtmOnSuccess,h=e.vtp_gtmOnFailure,
k=a(e.vtp_targetId);if(k){var l=function(a){return Yc(a,k.containerId,k.Lc||void 0)},m=!1!==l("conversion_linker"),n=l("conversion_cookie_prefix"),p=l("dc_natural_search"),q=3===hh();if("gtag.config"===Oc){var r=l("linker")||{};m&&((r.accept_incoming||!1!==r.accept_incoming&&r.domains)&&eh(d,n),dh(n),pg(n,void 0,void 0));r.domains&&fh(d,r.domains,r.url_position,!!r.decorate_forms,n);if(p&&p.exclusion_parameters&&p.engines){}I(g)}else{var u=
{},t=l("dc_custom_params");if(Pa(t))for(var A in t)if(t.hasOwnProperty(A)){var C=t[A];void 0!==C&&null!==C&&(u[A]=C)}var D="";if(5===k.J||6===k.J)D=b(Ve,l);var L=c(l,k.containerId,k.Lc),E=!0===l("allow_custom_scripts");if(De()&&E){E=!1}var F={va:k.va,ib:m,ia:n,jb:l("value"),J:k.J,xd:u,lb:k.Uc,mb:k.Tc,la:h,S:g,Wa:wb(N(Yg())),vb:D,protocol:q?"http:":"https:",Gc:l("quantity"),$a:E,sessionId:l("session_id"),Eb:l("transaction_id"),
Ga:L,Zc:!1!==l("allow_ad_personalization_signals")};wg(F,void 0)}}else I(h)})}();
Z.a.gtaggf=["google"],function(){var a=/.*\.google\.com(:\d+)?\/booking\/flights.*/,b=function(a){if(a){for(var b=[],c=0,g=0;g<a.length;++g){var h=a[g];!h||void 0!==h.category&&""!==h.category&&"FlightSegment"!==h.category||(b[c]={cabin:h.travel_class,fare_product:h.fare_product,booking_code:h.booking_code,flight_number:h.flight_number,origin:h.origin,destination:h.destination,departure_date:h.start_date},c++)}return b}};(function(a){Z.__gtaggf=a;Z.__gtaggf.g="gtaggf";Z.__gtaggf.h=!0;Z.__gtaggf.b=
0})(function(c){var d=Oc,e=c.vtp_gtmOnSuccess,g=c.vtp_gtmOnFailure,h=c.vtp_conversionId,k=h.W[0],l=h.containerId,m=function(a){return Yc(a,l,h.id)},n=!1!==m("conversion_linker"),p=m("conversion_cookie_prefix");if("gtag.config"===d)n&&dh(p),I(e);else{var q={conversion_id:k,onFailure:g,onSuccess:e,conversionLinkerEnabled:n,conversionPrefix:p};if("purchase"===d){var r=a.test(Yg()),u={partner_id:k,trip_type:m("trip_type"),total_price:m("value"),currency:m("currency"),is_direct_booking:r,flight_segment:b(m("items"))},
t=m("passengers");t&&"object"===typeof t&&(u.passengers_total=t.total,u.passengers_adult=t.adult,u.passengers_child=t.child,u.passengers_infant_in_seat=t.infant_in_seat,u.passengers_infant_in_lap=t.infant_in_lap);q.conversion_data=u}yg(q)}})}();
Z.a.gtagua=["google"],function(){var a,b={client_id:1,client_storage:"storage",cookie_name:1,cookie_domain:1,cookie_expires:1,cookie_path:1,cookie_update:1,sample_rate:1,site_speed_sample_rate:1,use_amp_client_id:1,store_gac:1,conversion_linker:"storeGac"},c={anonymize_ip:1,app_id:1,app_installer_id:1,app_name:1,app_version:1,campaign:{name:"campaignName",source:"campaignSource",medium:"campaignMedium",term:"campaignTerm",content:"campaignContent",id:"campaignId"},currency:"currencyCode",description:"exDescription",
fatal:"exFatal",language:1,non_interaction:1,page_hostname:"hostname",page_referrer:"referrer",page_path:"page",page_location:"location",page_title:"title",screen_name:1,transport_type:"transport",user_id:1},d={content_id:1,event_category:1,event_action:1,event_label:1,link_attribution:1,linker:1,method:1,name:1,send_page_view:1,value:1},e={cookie_name:1,cookie_expires:"duration",levels:1},g={anonymize_ip:1,fatal:1,non_interaction:1,use_amp_client_id:1,send_page_view:1,store_gac:1,conversion_linker:1},
h=function(a,b,c,d){if(void 0!==c)if(g[b]&&(c=Aa(c)),"anonymize_ip"!=b||c||(c=void 0),1===a)d[k(b)]=c;else if(sa(a))d[a]=c;else for(var e in a)a.hasOwnProperty(e)&&void 0!==c[e]&&(d[a[e]]=c[e])},k=function(a){return a&&sa(a)?a.replace(/(_[a-z])/g,function(a){return a[1].toUpperCase()}):a},l=function(a,b,c){a.hasOwnProperty(b)||(a[b]=c)},m=function(a,e,g){var k={},m={},n={},p;var q=sh("experiments",a);if(ua(q)){for(var t=[],r=0;r<q.length;r++){var u=q[r];if(void 0!=u){var A=u.id,ja=u.variant;void 0!=
A&&void 0!=ja&&t.push(String(A)+"."+String(ja))}}p=0<t.length?t.join("!"):void 0}else p=void 0;p&&l(m,"exp",p);var W=sh("custom_map",a);if(Pa(W))for(var ba in W)if(W.hasOwnProperty(ba)&&/^(dimension|metric)\d+$/.test(ba)){var M=sh(W[ba],a);void 0!==M&&l(m,ba,M)}for(var T=ad(a,void 0),O=0;O<T.length;++O){var Y=T[O],ca=sh(Y,a);d.hasOwnProperty(Y)?h(d[Y],Y,ca,k):c.hasOwnProperty(Y)?h(c[Y],Y,ca,m):b.hasOwnProperty(Y)?h(b[Y],Y,ca,n):/^(dimension|metric|content_group)\d+$/.test(Y)&&h(1,Y,ca,m)}var da=String(Oc);
l(n,"cookieDomain","auto");l(m,"forceSSL",!0);var xa="general";0<=Ze("add_payment_info add_to_cart add_to_wishlist begin_checkout checkout_progress purchase refund remove_from_cart set_checkout_option".split(" "),da)?xa="ecommerce":0<=Ze("generate_lead login search select_content share sign_up view_item view_item_list view_promotion view_search_results".split(" "),da)?xa="engagement":"exception"==da&&(xa="error");l(k,"eventCategory",xa);0<=Ze(["view_item","view_item_list","view_promotion","view_search_results"],
da)&&l(m,"nonInteraction",!0);"login"==da||"sign_up"==da||"share"==da?l(k,"eventLabel",sh("method",a)):"search"==da||"view_search_results"==da?l(k,"eventLabel",sh("search_term",a)):"select_content"==da&&l(k,"eventLabel",sh("content_type",a));var za=k.linker||{};if(za.accept_incoming||0!=za.accept_incoming&&za.domains)n.allowLinker=!0;if(!1===sh("allow_display_features",a)||!1===sh("allow_ad_personalization_signals",a))m.allowAdFeatures=!1;n.name=e;m[">m"]=sg(!0);m.hitCallback=g;k.R=m;k.ac=n;return k},
n=function(a){function b(a){var b=Qa(a,void 0);b.list=a.list_name;b.listPosition=a.list_position;b.position=a.list_position||a.creative_slot;b.creative=a.creative_name;return b}function c(a){for(var c=[],d=0;a&&d<a.length;d++)a[d]&&c.push(b(a[d]));return c.length?c:void 0}function d(a){return{id:e("transaction_id"),affiliation:e("affiliation"),revenue:e("value"),tax:e("tax"),shipping:e("shipping"),coupon:e("coupon"),list:e("list_name")||a}}for(var e=function(b){return Yc(b,a,void 0)},g=e("items"),
h,k=0;g&&k<g.length&&!(h=g[k].list_name);k++);var m=e("custom_map");if(Pa(m))for(k=0;g&&k<g.length;++k){var n=g[k],p;for(p in m)m.hasOwnProperty(p)&&/^(dimension|metric)\d+$/.test(p)&&l(n,p,n[m[p]])}var q=null,r=Oc,u=e("promotions");"purchase"==r||"refund"==r?q={action:r,sa:d(),ma:c(g)}:"add_to_cart"==r?q={action:"add",ma:c(g)}:"remove_from_cart"==r?q={action:"remove",ma:c(g)}:"view_item"==r?q={action:"detail",sa:d(h),ma:c(g)}:"view_item_list"==r?q={action:"impressions",Xd:c(g)}:"view_promotion"==
r?q={action:"promo_view",wb:c(u)}:"select_content"==r&&u&&0<u.length?q={action:"promo_click",wb:c(u)}:"select_content"==r?q={action:"click",sa:{list:e("list_name")||h},ma:c(g)}:"begin_checkout"==r||"checkout_progress"==r?q={action:"checkout",ma:c(g),sa:{step:"begin_checkout"==r?1:e("checkout_step"),option:e("checkout_option")}}:"set_checkout_option"==r&&(q={action:"checkout_option",sa:{step:e("checkout_step"),option:e("checkout_option")}});q&&(q.cf=e("currency"));return q},p={},q=function(a,b){var c=
p[a];p[a]=Qa(b,void 0);if(!c)return!1;for(var d in b)if(b.hasOwnProperty(d)&&b[d]!==c[d])return!0;for(d in c)if(c.hasOwnProperty(d)&&c[d]!==b[d])return!0;return!1},r=function(b){var c=b.vtp_trackingId,d=ud(void 0),g="gtag_"+c.split("-").join("_"),p=function(a){var b=[].slice.call(arguments,0);b[0]=g+"."+b[0];d.apply(window,b)},r=function(){var a=function(a,b){for(var c=0;b&&c<b.length;c++)p(a,b[c])},b=n(c);if(b){var d=b.action;if("impressions"==d)a("ec:addImpression",b.Xd);else if("promo_click"==
d||"promo_view"==d){var e=b.wb;a("ec:addPromo",b.wb);e&&0<e.length&&"promo_click"==d&&p("ec:setAction",d)}else a("ec:addProduct",b.ma),p("ec:setAction",d,b.sa)}},u=function(){if(De()){}else{var a=sh("optimize_id",c);a&&(p("require",a,{dataLayer:"dataLayer"}),p("require","render"))}},F=m(c,g,b.vtp_gtmOnSuccess);q(g,F.ac)&&d(function(){td()&&td().remove(g)});d("create",c,F.ac);(function(){var a=sh("custom_map",c);
d(function(){if(Pa(a)){var b=F.R,c=td().getByName(g),d;for(d in a)if(a.hasOwnProperty(d)&&/^(dimension|metric)\d+$/.test(d)){var e=c.get(k(a[d]));l(b,d,e)}}})})();(function(a){if(a){var b={};if(Pa(a))for(var c in e)e.hasOwnProperty(c)&&h(e[c],c,a[c],b);p("require","linkid",b)}})(F.linkAttribution);var J=F.linker;J&&J.domains&&wd(g+".",J.domains,!!J.use_anchor,!!J.decorate_forms);var H=function(a,b,c){c&&(b=""+b);F.R[a]=b},K=Oc;"page_view"==K?(u(),p("send","pageview",F.R)):"gtag.config"==K?(u(),0!=
F.sendPageView&&p("send","pageview",F.R)):"screen_view"==K?p("send","screenview",F.R):"timing_complete"==K?(H("timingCategory",F.eventCategory,!0),H("timingVar",F.name,!0),H("timingValue",ya(F.value)),void 0!==F.eventLabel&&H("timingLabel",F.eventLabel,!0),p("send","timing",F.R)):"exception"==K?p("send","exception",F.R):(0<=Ze("view_item_list select_content view_item add_to_cart remove_from_cart begin_checkout set_checkout_option purchase refund view_promotion checkout_progress".split(" "),K)&&(p("require",
"ec","ec.js"),r()),H("eventCategory",F.eventCategory,!0),H("eventAction",F.eventAction||K,!0),void 0!==F.eventLabel&&H("eventLabel",F.eventLabel,!0),void 0!==F.value&&H("eventValue",ya(F.value)),p("send","event",F.R));a||(a=!0,S("https://www.google-analytics.com/analytics.js",function(){td().loaded||b.vtp_gtmOnFailure()},b.vtp_gtmOnFailure))};Z.__gtagua=r;Z.__gtagua.g="gtagua";Z.__gtagua.h=!0;Z.__gtagua.b=
0}();
var fi={macro:function(a){if(xe.fb.hasOwnProperty(a))return xe.fb[a]}};fi.dataLayer=Xc;fi.onHtmlSuccess=xe.Zb(!0);fi.onHtmlFailure=xe.Zb(!1);fi.callback=function(a){Qc.hasOwnProperty(a)&&ra(Qc[a])&&Qc[a]();delete Qc[a]};fi.gd=function(){Mc[Lc.w]=fi;Rc=Z.a;uc=uc||xe;vc=hd};
fi.Yd=function(){Mc=z.google_tag_manager=z.google_tag_manager||{};if(Mc[Lc.w]){var a=Mc.zones;a&&a.unregisterChild(Lc.w)}else{for(var b=data.resource||{},c=b.macros||[],d=0;d<c.length;d++)nc.push(c[d]);for(var e=b.tags||[],g=0;g<e.length;g++)qc.push(e[g]);for(var h=b.predicates||[],k=0;k<h.length;k++)pc.push(h[k]);for(var l=b.rules||[],m=0;m<l.length;m++){for(var n=l[m],p={},q=0;q<n.length;q++)p[n[q][0]]=Array.prototype.slice.call(n[q],1);oc.push(p)}sc=Z;Te();fi.gd();we();ld=!1;md=0;if("interactive"==
B.readyState&&!B.createEventObject||"complete"==B.readyState)od();else{kb(B,"DOMContentLoaded",od);kb(B,"readystatechange",od);if(B.createEventObject&&B.documentElement.doScroll){var r=!0;try{r=!z.frameElement}catch(t){}r&&pd()}kb(z,"load",od)}oe=!1;"complete"===B.readyState?qe():kb(z,"load",qe);a:{
if(!Bd)break a;Ed();Hd=void 0;Id={};Fd={};Kd=void 0;Jd={};Gd="";Ld=Cd();z.setInterval(Ed,864E5);}Nc=(new Date).getTime()}};fi.Yd();
})() | !d&&"http:"==z.location.protocol;e&&(e=2!==hh());return(e?b:a)+c},ih=function(a,b){if(De()){b&&I(b)}else jb(a,b)};
var jh=function(a){var b=0;return b},kh=function(a){},lh=function(a){var b=!1;return b},mh=function(a,b){var c;a:{if(a&& |
task_handler_types.go | // Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"). You may
// not use this file except in compliance with the License. A copy of the
// License is located at
//
// http://aws.amazon.com/apache2.0/
//
// or in the "license" file accompanying this file. This file is distributed
// on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
// express or implied. See the License for the specific language governing
// permissions and limitations under the License.
package eventhandler
import (
"container/list"
"fmt"
"sync"
"github.com/aws/amazon-ecs-agent/agent/api"
apicontainer "github.com/aws/amazon-ecs-agent/agent/api/container"
apicontainerstatus "github.com/aws/amazon-ecs-agent/agent/api/container/status"
apitask "github.com/aws/amazon-ecs-agent/agent/api/task"
apitaskstatus "github.com/aws/amazon-ecs-agent/agent/api/task/status"
"github.com/aws/amazon-ecs-agent/agent/data"
"github.com/aws/amazon-ecs-agent/agent/utils/retry"
"github.com/cihub/seelog"
)
// sendableEvent is a state change that may have a container and, optionally,
// a task event to send
type sendableEvent struct {
	// Either a container event or a task event
isContainerEvent bool
containerSent bool
containerChange api.ContainerStateChange
taskSent bool
taskChange api.TaskStateChange
lock sync.RWMutex
}
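// newSendableTaskEvent wraps a task state change in a sendableEvent; the
// container-specific fields keep their zero values.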
func newSendableTaskEvent(event api.TaskStateChange) *sendableEvent {
return &sendableEvent{
isContainerEvent: false,
taskSent: false,
taskChange: event,
}
}
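// taskArn returns the ARN of the task this event refers to, for both
// container and task state change events.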
func (event *sendableEvent) taskArn() string {
if event.isContainerEvent {
return event.containerChange.TaskArn
}
return event.taskChange.TaskARN
}
// taskShouldBeSent checks whether the event should be sent; this covers both
// task state change and container/managed agent state change events
func (event *sendableEvent) taskShouldBeSent() bool {
event.lock.RLock()
defer event.lock.RUnlock()
if event.isContainerEvent {
return false
}
tevent := event.taskChange
if event.taskSent {
return false // redundant event
}
	// Task state change events (including those carrying container changes) must have a non-nil Task
if tevent.Task == nil {
return false
}
// Task event should be sent
if tevent.Task.GetSentStatus() < tevent.Status {
return true
}
// Container event should be sent
for _, containerStateChange := range tevent.Containers {
container := containerStateChange.Container
if container.GetSentStatus() < container.GetKnownStatus() {
return true
}
}
// Managed agent event should be sent
for _, managedAgentStateChange := range tevent.ManagedAgents {
managedAgentName := managedAgentStateChange.Name
container := managedAgentStateChange.Container
if container.GetManagedAgentSentStatus(managedAgentName) != container.GetManagedAgentStatus(managedAgentName) {
return true
}
}
return false
}
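// taskAttachmentShouldBeSent checks whether an ENI attachment state change
// should be sent, based on the attachment's presence, expiry, and sent flag.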
func (event *sendableEvent) taskAttachmentShouldBeSent() bool {
event.lock.RLock()
defer event.lock.RUnlock()
if event.isContainerEvent |
tevent := event.taskChange
return tevent.Status == apitaskstatus.TaskStatusNone && // Task Status is not set for attachments as task record has yet to be streamed down
tevent.Attachment != nil && // Task has attachment records
!tevent.Attachment.HasExpired() && // ENI attachment ack timestamp hasn't expired
		!tevent.Attachment.IsSent() // Attachment status hasn't already been sent
}
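// containerShouldBeSent checks whether a container state change should be
// sent, filtering out events that were already sent or are stale.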
func (event *sendableEvent) containerShouldBeSent() bool {
event.lock.RLock()
defer event.lock.RUnlock()
if !event.isContainerEvent {
return false
}
cevent := event.containerChange
if event.containerSent || (cevent.Container != nil && cevent.Container.GetSentStatus() >= cevent.Status) {
return false
}
return true
}
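// setSent marks the event as sent on the container or task flag, as appropriate.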
func (event *sendableEvent) setSent() {
event.lock.Lock()
defer event.lock.Unlock()
if event.isContainerEvent {
event.containerSent = true
} else {
event.taskSent = true
}
}
// send tries to send an event, specified by 'eventToSubmit', of type
// 'eventType' to ECS
func (event *sendableEvent) send(
sendStatusToECS sendStatusChangeToECS,
setChangeSent setStatusSent,
eventType string,
client api.ECSClient,
eventToSubmit *list.Element,
dataClient data.Client,
backoff retry.Backoff,
taskEvents *taskSendableEvents) error {
seelog.Infof("TaskHandler: Sending %s change: %s", eventType, event.toString())
// Try submitting the change to ECS
if err := sendStatusToECS(client, event); err != nil {
seelog.Errorf("TaskHandler: Unretriable error submitting %s state change [%s]: %v",
eventType, event.toString(), err)
return err
}
	// Submitted successfully; mark the event as sent so it is not retried
	event.setSent()
	// Persist the sent status using the event-type-specific setter
	setChangeSent(event, dataClient)
	seelog.Debugf("TaskHandler: Submitted %s state change: %s", eventType, event.toString())
taskEvents.events.Remove(eventToSubmit)
backoff.Reset()
return nil
}
// sendStatusChangeToECS defines a function type for invoking the appropriate ECS state change API
type sendStatusChangeToECS func(client api.ECSClient, event *sendableEvent) error
// sendContainerStatusToECS invokes the SubmitContainerStateChange API to send a
// container status change to ECS
func sendContainerStatusToECS(client api.ECSClient, event *sendableEvent) error {
return client.SubmitContainerStateChange(event.containerChange)
}
// sendTaskStatusToECS invokes the SubmitTaskStateChange API to send a task
// status change to ECS
func sendTaskStatusToECS(client api.ECSClient, event *sendableEvent) error {
return client.SubmitTaskStateChange(event.taskChange)
}
// setStatusSent defines a function type to mark the event as sent
type setStatusSent func(event *sendableEvent, dataClient data.Client)
// setContainerChangeSent sets the event's container change object as sent
func setContainerChangeSent(event *sendableEvent, dataClient data.Client) {
containerChangeStatus := event.containerChange.Status
container := event.containerChange.Container
if container != nil && container.GetSentStatus() < containerChangeStatus {
updateContainerSentStatus(container, containerChangeStatus, dataClient)
}
}
// setTaskChangeSent sets the event's task change object as sent
func setTaskChangeSent(event *sendableEvent, dataClient data.Client) {
taskChangeStatus := event.taskChange.Status
task := event.taskChange.Task
if task != nil && task.GetSentStatus() < taskChangeStatus {
		updateTaskSentStatus(task, taskChangeStatus, dataClient)
}
for _, containerStateChange := range event.taskChange.Containers {
updateContainerSentStatus(containerStateChange.Container, containerStateChange.Status, dataClient)
}
for _, managedAgentStateChange := range event.taskChange.ManagedAgents {
updateManagedAgentSentStatus(managedAgentStateChange.Container, managedAgentStateChange.Name, managedAgentStateChange.Status, dataClient)
}
}
// setTaskAttachmentSent sets the event's task attachment object as sent
func setTaskAttachmentSent(event *sendableEvent, dataClient data.Client) {
if event.taskChange.Attachment != nil {
attachment := event.taskChange.Attachment
attachment.SetSentStatus()
attachment.StopAckTimer()
err := dataClient.SaveENIAttachment(attachment)
if err != nil {
seelog.Errorf("Failed to update attachment sent status in database for attachment %s: %v", attachment.AttachmentARN, err)
}
}
}
// toString returns a human-readable description of the event for logging
func (event *sendableEvent) toString() string {
event.lock.RLock()
defer event.lock.RUnlock()
if event.isContainerEvent {
return "ContainerChange: [" + event.containerChange.String() + fmt.Sprintf("] sent: %t", event.containerSent)
} else {
return "TaskChange: [" + event.taskChange.String() + fmt.Sprintf("] sent: %t", event.taskSent)
}
}
func updateTaskSentStatus(task *apitask.Task, status apitaskstatus.TaskStatus, dataClient data.Client) {
task.SetSentStatus(status)
err := dataClient.SaveTask(task)
if err != nil {
seelog.Errorf("Failed to update task sent status in database for task %s: %v", task.Arn, err)
}
}
func updateContainerSentStatus(container *apicontainer.Container, status apicontainerstatus.ContainerStatus, dataClient data.Client) {
if container.GetSentStatus() < status {
container.SetSentStatus(status)
if err := dataClient.SaveContainer(container); err != nil {
seelog.Errorf("Failed to update container sent status in database for container %s: %v", container.Name, err)
}
}
}
func updateManagedAgentSentStatus(container *apicontainer.Container, managedAgentName string, status apicontainerstatus.ManagedAgentStatus, dataClient data.Client) {
if container.GetManagedAgentSentStatus(managedAgentName) != status {
container.UpdateManagedAgentSentStatus(managedAgentName, status)
if err := dataClient.SaveContainer(container); err != nil {
seelog.Errorf("Failed to update %s managed agent sent status in database for container %s: %v", managedAgentName, container.Name, err)
}
}
}
| {
return false
} |
path.py | from onegov.ballot import Ballot
from onegov.ballot import BallotCollection
from onegov.ballot import Candidate
from onegov.ballot import CandidateCollection
from onegov.ballot import Election
from onegov.ballot import ElectionCollection
from onegov.ballot import ElectionCompound
from onegov.ballot import ElectionCompoundCollection
from onegov.ballot import List
from onegov.ballot import ListCollection
from onegov.ballot import Vote
from onegov.ballot import VoteCollection
from onegov.core.converters import extended_date_converter
from onegov.core.i18n import SiteLocale
from onegov.election_day import ElectionDayApp
from onegov.election_day.collections import ArchivedResultCollection
from onegov.election_day.collections import DataSourceCollection
from onegov.election_day.collections import DataSourceItemCollection
from onegov.election_day.collections import EmailSubscriberCollection
from onegov.election_day.collections import ScreenCollection
from onegov.election_day.collections import SearchableArchivedResultCollection
from onegov.election_day.collections import SmsSubscriberCollection
from onegov.election_day.collections import SubscriberCollection
from onegov.election_day.collections import UploadTokenCollection
from onegov.election_day.models import DataSource
from onegov.election_day.models import DataSourceItem
from onegov.election_day.models import Principal
from onegov.election_day.models import Screen
from onegov.election_day.models import Subscriber
from onegov.election_day.models import UploadToken
from onegov.user import Auth
from uuid import UUID
@ElectionDayApp.path(
model=Auth,
path='/auth'
)
def get_auth(request, to='/'):
return Auth.from_request(request, to)
@ElectionDayApp.path(
model=Principal,
path='/'
)
def get_principal(app):
return app.principal
@ElectionDayApp.path(
model=ElectionCollection,
path='/manage/elections',
converters=dict(
page=int
)
)
def get_manage_elections(app, page=0):
return ElectionCollection(app.session(), page=page)
@ElectionDayApp.path(
model=ElectionCompoundCollection,
path='/manage/election-compounds',
converters=dict(
page=int
)
)
def get_manage_election_compounds(app, page=0):
return ElectionCompoundCollection(app.session(), page=page)
@ElectionDayApp.path(
model=VoteCollection,
path='/manage/votes',
converters=dict(
page=int
)
)
def get_manage_votes(app, page=0):
return VoteCollection(app.session(), page=page)
@ElectionDayApp.path(
model=SmsSubscriberCollection,
path='/manage/subscribers/sms',
converters=dict(
page=int
)
)
def get_manage_sms_subscribers(app, page=0, term=None):
return SmsSubscriberCollection(app.session(), page=page, term=term)
@ElectionDayApp.path(
model=EmailSubscriberCollection,
path='/manage/subscribers/email',
converters=dict(
page=int
)
)
def get_manage_email_subscribers(app, page=0, term=None):
return EmailSubscriberCollection(app.session(), page=page, term=term)
@ElectionDayApp.path(
model=UploadTokenCollection,
path='/manage/upload-tokens'
)
def get_manage_upload_tokens(app):
return UploadTokenCollection(app.session())
@ElectionDayApp.path(
model=DataSourceCollection,
path='/manage/sources',
converters=dict(
page=int
)
)
def get_manage_data_sources(app, page=0):
return DataSourceCollection(app.session(), page=page)
@ElectionDayApp.path(
model=DataSourceItemCollection,
path='/manage/source/{id}/items',
converters=dict(
id=UUID,
page=int
)
)
def get_manage_data_source_items(app, id, page=0):
return DataSourceItemCollection(app.session(), id, page=page)
@ElectionDayApp.path(
model=Election,
path='/election/{id}',
)
def get_election(app, id):
return ElectionCollection(app.session()).by_id(id)
@ElectionDayApp.path(
model=Candidate,
path='/candidate/{id}',
converters=dict(
id=UUID
)
)
def get_candidate(app, id):
return CandidateCollection(app.session()).by_id(id)
@ElectionDayApp.path(
model=List,
path='/list/{id}',
converters=dict(
id=UUID
)
)
def get_list(app, id):
return ListCollection(app.session()).by_id(id)
@ElectionDayApp.path(
model=ElectionCompound,
path='/elections/{id}'
)
def get_election_compound(app, id):
return ElectionCompoundCollection(app.session()).by_id(id)
@ElectionDayApp.path(
model=Vote,
path='/vote/{id}'
)
def get_vote(app, id):
return VoteCollection(app.session()).by_id(id)
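# Illustrative note (not in the original module): morepath uses these registrations for
# link generation as well as routing, so request.link(vote) for a Vote whose id is
# 'my-vote' would resolve to '/vote/my-vote' and route back through get_vote().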
@ElectionDayApp.path(
model=Ballot,
path='/ballot/{id}',
converters=dict(
id=UUID
)
)
def get_ballot(app, id):
return BallotCollection(app.session()).by_id(id)
@ElectionDayApp.path(
model=Subscriber,
path='/subscriber/{id}',
converters=dict(
id=UUID
)
)
def get_subscriber(app, id):
return SubscriberCollection(app.session()).by_id(id)
@ElectionDayApp.path(
model=UploadToken,
path='/upload-token/{id}',
converters=dict(
id=UUID
)
)
def get_upload_token(app, id):
return UploadTokenCollection(app.session()).by_id(id)
@ElectionDayApp.path(
model=DataSource,
path='/data-source/{id}',
converters=dict(
id=UUID
)
)
def get_data_source(app, id):
return DataSourceCollection(app.session()).by_id(id)
@ElectionDayApp.path(
model=DataSourceItem,
path='/data-source-item/{id}',
converters=dict(
id=UUID
)
)
def get_data_source_item(app, id):
return DataSourceItemCollection(app.session()).by_id(id)
@ElectionDayApp.path(
model=ArchivedResultCollection,
path='/archive/{date}'
)
def get_archive_by_year(app, date):
return ArchivedResultCollection(app.session(), date)
@ElectionDayApp.path(
model=SearchableArchivedResultCollection,
path='/archive-search/{item_type}',
converters=dict(
from_date=extended_date_converter,
to_date=extended_date_converter,
domains=[str],
answers=[str],
page=int
)
)
def get_archive_search(
app,
from_date=None,
to_date=None,
answers=None,
item_type=None,
domains=None,
term=None,
page=0
):
return SearchableArchivedResultCollection.for_item_type(
app.session(),
item_type,
to_date=to_date,
from_date=from_date,
answers=answers,
domains=domains,
term=term,
page=page
)
@ElectionDayApp.path(
model=SiteLocale,
path='/locale/{locale}'
)
def get_locale(request, app, locale, to=None):
to = to or request.link(app.principal)
return SiteLocale.for_path(app, locale, to)
@ElectionDayApp.path(
model=ScreenCollection,
path='/manage/screens',
converters=dict(
page=int
)
)
def get_manage_screens(app, page=0):
return ScreenCollection(app.session(), page)
@ElectionDayApp.path(
model=Screen,
path='/screen/{number}',
converters=dict(
number=int
)
)
def get_screen(app, number):
return ScreenCollection(app.session()).by_number(number)
publicroomsapi.go | // Copyright 2017 Vector Creations Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package publicroomsapi
import (
"github.com/matrix-org/dendrite/clientapi/auth/storage/devices"
"github.com/matrix-org/dendrite/internal/basecomponent"
"github.com/matrix-org/dendrite/publicroomsapi/consumers"
"github.com/matrix-org/dendrite/publicroomsapi/routing"
"github.com/matrix-org/dendrite/publicroomsapi/storage"
"github.com/matrix-org/dendrite/publicroomsapi/types"
roomserverAPI "github.com/matrix-org/dendrite/roomserver/api"
"github.com/matrix-org/gomatrixserverlib"
"github.com/sirupsen/logrus"
)
// SetupPublicRoomsAPIComponent sets up and registers HTTP handlers for the PublicRoomsAPI
// component.
func SetupPublicRoomsAPIComponent(
base *basecomponent.BaseDendrite,
deviceDB devices.Database,
publicRoomsDB storage.Database,
rsAPI roomserverAPI.RoomserverInternalAPI,
fedClient *gomatrixserverlib.FederationClient,
extRoomsProvider types.ExternalPublicRoomsProvider,
) {
rsConsumer := consumers.NewOutputRoomEventConsumer(
base.Cfg, base.KafkaConsumer, publicRoomsDB, rsAPI,
)
if err := rsConsumer.Start(); err != nil {
logrus.WithError(err).Panic("failed to start public rooms server consumer")
}
routing.Setup(base.APIMux, deviceDB, publicRoomsDB, rsAPI, fedClient, extRoomsProvider)
}
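// Hypothetical wiring sketch (names assumed, not taken from this file): a monolith
// entry point might call, during component startup,
//   publicroomsapi.SetupPublicRoomsAPIComponent(base, deviceDB, publicRoomsDB, rsAPI, fedClient, nil)
// where a nil extRoomsProvider is assumed to fall back to serving rooms from the local database.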
environment.ts | // This file can be replaced during build by using the `fileReplacements` array.
// `ng build --prod` replaces `environment.ts` with `environment.prod.ts`.
// The list of file replacements can be found in `angular.json`.
export const environment = {
production: true,
apiUrl: 'https://api.github.com/users/',
gitToken: '?access_token=f8e9ab0a75524787668f7e95c0f97342826200e4'
};
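// Usage sketch (assumed, not part of this file): a data service could compose a request
// URL as `${environment.apiUrl}${username}${environment.gitToken}`. Note that GitHub has
// since deprecated `?access_token=` query-string authentication in favour of headers.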
/*
* For easier debugging in development mode, you can import the following file
* to ignore zone related error stack frames such as `zone.run`, `zoneDelegate.invokeTask`.
*
* This import should be commented out in production mode because it will have a negative impact
* on performance if an error is thrown.
*/
// import 'zone.js/dist/zone-error'; // Included with Angular CLI.
Bathtub.js | import createSvgIcon from './utils/createSvgIcon';
import { jsx as _jsx } from "react/jsx-runtime";
export default createSvgIcon([/*#__PURE__*/_jsx("circle", {
cx: "7",
cy: "7", | r: "2"
}, "0"), /*#__PURE__*/_jsx("path", {
d: "M20 13V4.83C20 3.27 18.73 2 17.17 2c-.75 0-1.47.3-2 .83l-1.25 1.25c-.16-.05-.33-.08-.51-.08-.4 0-.77.12-1.08.32l2.76 2.76c.2-.31.32-.68.32-1.08 0-.18-.03-.34-.07-.51l1.25-1.25c.15-.15.36-.24.58-.24.46 0 .83.37.83.83V13h-6.85c-.3-.21-.57-.45-.82-.72l-1.4-1.55c-.19-.21-.43-.38-.69-.5-.31-.15-.65-.23-1-.23C6 10.01 5 11.01 5 12.25V13H2v6c0 1.1.9 2 2 2 0 .55.45 1 1 1h14c.55 0 1-.45 1-1 1.1 0 2-.9 2-2v-6h-2z"
}, "1")], 'Bathtub'); |
tests.rs | use super::*;
use ::divvy::Str;
#[test]
fn entry_default() {
assert_eq!(Entry::<()>::default(), Nil);
}
#[test]
fn table_setting_header() {
let mut x: Table<()> = Table::new();
x.set_header(false);
assert_eq!(x.header, false);
x.set_header(true);
assert_eq!(x.header, true);
}
#[test]
fn table_is_empty() {
let mut r: Table<()> = Table::default();
assert_eq!(r.is_empty(), true);
r.add_row(vec![Entry::Nil].into_iter());
assert_eq!(r.is_empty(), false);
}
#[test]
fn new_table_repr() {
let x: Table<()> = Table::new();
assert_eq!(x.data.is_empty(), true);
assert_eq!(x.header, true);
assert_eq!(x.cols, 0);
}
#[test]
fn table_repr_add() {
let mut x: Table<()> = Table::new();
// add a row
x.add_row(vec![Nil, Num(101.into()), Obj(())].into_iter());
assert_eq!(x.data, vec![vec![Nil, Num(101.into()), Obj(())]]);
// add a short row
x.add_row(vec![Obj(())].into_iter());
assert_eq!(
x.data,
vec![vec![Nil, Num(101.into()), Obj(())], vec![Obj(()), Nil, Nil]]
);
// add a long row
x.add_row(repeat(Obj(())).take(4));
assert_eq!(
x.data,
vec![
vec![Nil, Num(101.into()), Obj(()), Nil],
vec![Obj(()), Nil, Nil, Nil],
vec![Obj(()); 4]
]
);
}
#[test]
fn table_add_rows() {
let mut x: Table<()> = Table::new();
let mut v = Vec::new();
for i in 1..=100 {
v.push(vec![Obj(()); i]);
}
x.add_rows(v.into_iter().map(|x| x.into_iter())); // owned iterators (cf. test_borrowing_example below)
let ans: Vec<_> = (1..=100)
.map(|i| {
let mut v = vec![Obj(()); i];
v.resize_with(100, Default::default);
v
})
.collect();
assert_eq!(x.data, ans);
}
#[test]
fn table_add_col() {
let mut x: Table<()> = Table::default();
x.add_col(once(Obj(())));
assert_eq!(x.data, vec![vec![Obj(())]]);
// longer col
x.add_col(vec![Nil, Num(101.into()), Num(202.into())].into_iter());
assert_eq!(
x.data,
vec![
vec![Obj(()), Nil],
vec![Nil, Num(101.into())],
vec![Nil, Num(202.into())]
]
);
// shorter col
x.add_col(once(Num(303.into())));
assert_eq!(
x.data,
vec![
vec![Obj(()), Nil, Num(303.into())],
vec![Nil, Num(101.into()), Nil],
vec![Nil, Num(202.into()), Nil]
]
);
}
#[test]
fn table_add_cols() {
let mut x = Table::new();
let v: Vec<_> = (1..=100).map(|i| vec![Obj(()); i]).collect();
x.add_cols(v.into_iter().map(|x| x.into_iter()));
let ans: Vec<_> = (0..100)
.map(|i| {
let mut v = vec![Nil; i];
v.resize(100, Obj(()));
v
})
.collect();
assert_eq!(x.data, ans);
}
#[test]
fn test_is_data_empty() {
let mut r: Table<()> = Table::default();
assert_eq!(r.is_data_empty(), true);
r.add_row(vec![Entry::Nil].into_iter());
assert_eq!(r.is_empty(), false);
assert_eq!(r.is_data_empty(), true);
r.set_header(false);
assert_eq!(r.is_data_empty(), false);
r.set_header(true);
r.add_row(vec![Entry::Nil].into_iter());
assert_eq!(r.is_data_empty(), false);
}
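// What the assertions above pin down: `is_empty` counts raw rows, while `is_data_empty`
// discounts the first row whenever `header` is true, so a one-row table with a header
// still reports empty data.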
#[test]
fn test_lens() {
let mut r: Table<()> = Table::default();
assert_eq!(r.rows_len(), 0);
assert_eq!(r.cols_len(), 0);
r.add_row(vec![Nil, Nil].into_iter());
assert_eq!(r.rows_len(), 1);
assert_eq!(r.cols_len(), 2);
}
#[test]
fn test_iteration() {
let mut r: Table<()> = Table::new();
r.add_rows(
vec![
vec![Nil, Num(101.into())].into_iter(),
vec![Num(202.into()), Obj(())].into_iter(),
]
.into_iter(),
);
let mut row = r.row(0).unwrap();
assert_eq!(row.next(), Some(&Nil));
assert_eq!(row.next(), Some(&Num(101.into())));
let mut row = r.row(1).unwrap();
assert_eq!(row.next(), Some(&Num(202.into())));
assert_eq!(row.next(), Some(&Obj(())));
assert_eq!(r.row(2).is_none(), true);
let mut col = r.col(0).unwrap();
assert_eq!(col.next(), Some(&Nil));
assert_eq!(col.next(), Some(&Num(202.into())));
let mut col = r.col(1).unwrap();
assert_eq!(col.next(), Some(&Num(101.into())));
assert_eq!(col.next(), Some(&Obj(())));
assert_eq!(r.col(2).is_none(), true);
// iteration
let mut rows = r.rows();
let mut row = rows.next().unwrap();
assert_eq!(row.next(), Some(&Nil));
assert_eq!(row.next(), Some(&Num(101.into())));
let mut row = rows.next().unwrap();
assert_eq!(row.next(), Some(&Num(202.into())));
assert_eq!(row.next(), Some(&Obj(())));
assert_eq!(row.next().is_none(), true);
let mut cols = r.cols();
let mut col = cols.next().unwrap();
assert_eq!(col.next(), Some(&Nil));
assert_eq!(col.next(), Some(&Num(202.into())));
let mut col = cols.next().unwrap();
assert_eq!(col.next(), Some(&Num(101.into())));
assert_eq!(col.next(), Some(&Obj(())));
assert_eq!(cols.next().is_none(), true);
}
#[test]
fn entry_helpers() {
let e = Entry::<()>::Nil;
assert_eq!(e.is_nil(), true);
assert_eq!(e.is_num(), false);
let e = Entry::<()>::Num(101.into());
assert_eq!(e.is_num(), true);
assert_eq!(e.is_obj(), false);
let e = Entry::Obj(());
assert_eq!(e.is_obj(), true);
assert_eq!(e.is_nil(), false);
}
#[test]
fn test_borrowing_example() {
let mut x = Table::new();
x.add_rows(
vec![vec![Nil, Nil, Obj("Hello")], vec![Num(101.into())], vec![]]
.into_iter()
.map(|x| x.into_iter()),
);
let mut brw = Table::new();
brw.add_rows(x.rows());
assert_eq!(brw, x);
}
#[test]
fn inserting_rows_columns() {
let mut repr = <Table<()>>::new();
repr.insert_row(0, [Obj(()), Obj(())].iter());
let mut exp = Table::new();
exp.add_row([Obj(()), Obj(())].iter());
assert_eq!(repr, exp);
repr.insert_row(0, [Num(1.into())].iter());
let mut exp = Table::new();
exp.add_rows(vec![[Num(1.into()), Nil].iter(), [Obj(()), Obj(())].iter()].into_iter());
assert_eq!(repr, exp);
repr.insert_col(2, [Num(2.into())].iter());
let mut exp = Table::new();
exp.add_rows(
vec![
[Num(1.into()), Nil, Num(2.into())].iter(),
[Obj(()), Obj(()), Nil].iter(),
]
.into_iter(),
);
assert_eq!(repr, exp);
repr.insert_col(1, [Obj(()), Num(3.into()), Obj(())].iter());
let mut exp = Table::new();
exp.add_rows(
vec![
[Num(1.into()), Obj(()), Nil, Num(2.into())].iter(),
[Obj(()), Num(3.into()), Obj(()), Nil].iter(),
[Nil, Obj(()), Nil, Nil].iter(),
]
.into_iter(),
);
assert_eq!(repr, exp);
repr.insert_row(
1,
[
Num(0.into()),
Num(1.into()),
Num(2.into()),
Num(3.into()),
Num(4.into()),
]
.iter(),
);
let mut exp = Table::new();
exp.add_rows(
vec![
[Num(1.into()), Obj(()), Nil, Num(2.into()), Nil].iter(),
[
Num(0.into()),
Num(1.into()),
Num(2.into()),
Num(3.into()),
Num(4.into()),
]
.iter(),
[Obj(()), Num(3.into()), Obj(()), Nil, Nil].iter(),
[Nil, Obj(()), Nil, Nil, Nil].iter(),
]
.into_iter(),
);
assert_eq!(repr, exp);
}
#[test]
#[should_panic]
fn insert_row_panic() {
let mut repr: Table<()> = Table::new();
repr.insert_row(1, [Nil, Nil].iter());
}
#[test]
#[should_panic]
fn insert_col_panic() {
let mut repr: Table<()> = Table::new();
repr.insert_col(1, [Nil, Nil].iter());
}
#[test]
fn entry_as_str() {
assert_eq!(Entry::<Str>::Nil.as_str(), "-");
assert_eq!(Entry::<Str>::Num(3.14.into()).as_str(), "3.14");
assert_eq!(Entry::<Str>::Obj("what".into()).as_str(), "what");
}
#[test]
fn remove_rows_columns() {
let mut table = <Table<()>>::new();
table.add_rows(
vec![
vec![Obj(()), Num(1.into()), Nil],
vec![Num(2.into()), Nil, Obj(())],
vec![Nil, Obj(()), Nil],
]
.into_iter()
.map(|x| x.into_iter()),
);
assert_eq!((table.rows_len(), table.cols_len()), (3, 3));
table.remove_row(1);
assert_eq!((table.rows_len(), table.cols_len()), (2, 3));
assert_eq!(
table.data,
vec![vec![Obj(()), Num(1.into()), Nil], vec![Nil, Obj(()), Nil]]
);
table.remove_col(1);
assert_eq!((table.rows_len(), table.cols_len()), (2, 2));
assert_eq!(table.data, vec![vec![Obj(()), Nil], vec![Nil, Nil]]);
table.remove_col(1);
assert_eq!((table.rows_len(), table.cols_len()), (2, 1));
// check that cols gets updated
let mut t = Table::new();
t.add_rows(
vec![vec![Obj(())], vec![Nil]]
.into_iter()
.map(|x| x.into_iter()),
);
assert_eq!(table, t);
}
#[test]
#[should_panic]
fn remove_row_panic() {
let mut table = <Table<()>>::new();
table.remove_row(0);
}
#[test]
#[should_panic]
fn remove_col_panic() {
let mut table = <Table<()>>::new();
table.remove_col(0);
}
#[test]
fn entry_ordering() {
use std::cmp::{Ordering::*, *};
// Nil LHS
let lhs: Entry<()> = Nil;
let rhs = Nil;
assert_eq!(lhs.partial_cmp(&rhs), Some(Equal));
assert!(lhs == rhs);
assert!(!(lhs > rhs));
assert!(!(lhs < rhs));
let lhs = Nil;
let rhs = Obj("hello");
assert_eq!(lhs.partial_cmp(&rhs), None);
assert!(lhs != rhs);
assert!(!(lhs > rhs));
assert!(!(lhs < rhs));
let lhs: Entry<&str> = Nil;
let rhs = Num(101.into());
assert_eq!(lhs.partial_cmp(&rhs), None);
assert!(lhs != rhs);
assert!(!(lhs > rhs));
assert!(!(lhs < rhs));
// Num LHS
let lhs: Entry<&str> = Num(101.into());
let rhs = Nil;
assert_eq!(lhs.partial_cmp(&rhs), None);
assert!(lhs != rhs);
assert!(!(lhs > rhs));
assert!(!(lhs < rhs));
let lhs: Entry<&str> = Num(101.into());
let rhs = Num(102.into());
assert_eq!(lhs.partial_cmp(&rhs), Some(Less));
assert!(lhs != rhs);
assert!(!(lhs > rhs));
assert!(lhs < rhs);
let lhs: Entry<&str> = Num(101.into());
let rhs = Obj("hello");
assert_eq!(lhs.partial_cmp(&rhs), None);
assert!(lhs != rhs);
assert!(!(lhs > rhs));
assert!(!(lhs < rhs));
// Obj LHS
let lhs: Entry<&str> = Obj("a");
let rhs = Nil;
assert_eq!(lhs.partial_cmp(&rhs), None);
assert!(lhs != rhs);
assert!(!(lhs > rhs));
assert!(!(lhs < rhs));
let lhs: Entry<&str> = Obj("a");
let rhs = Num(102.into());
assert_eq!(lhs.partial_cmp(&rhs), None);
assert!(lhs != rhs);
assert!(!(lhs > rhs));
assert!(!(lhs < rhs));
let lhs: Entry<&str> = Obj("a");
let rhs = Obj("b");
assert_eq!(lhs.partial_cmp(&rhs), Some(Less));
assert!(lhs != rhs);
assert!(!(lhs > rhs));
assert!(lhs < rhs);
}
#[test]
fn table_from_vector_of_vectors() {
let vs = vec![
vec![Nil, Num(101.into()), Obj(())],
vec![],
vec![Num(202.into())],
];
let table = Table::from(vs);
assert_eq!(table.cols, 3);
assert_eq!(table.header, true);
assert_eq!(
table.data,
vec![
vec![Nil, Num(101.into()), Obj(())],
vec![Nil, Nil, Nil],
vec![Num(202.into()), Nil, Nil]
]
);
}
#[test]
fn test_cloning() {
use Entry::*;
let vecs = vec![
vec![Nil, Obj(()), Num(101.into())],
vec![Obj(()), Obj(()), Num(303.into())],
vec![Obj(()), Num(303.into())],
];
let table = Table::from(vecs.clone());
let table_clone = table.clone();
let table_2 = Table::from(vecs);
assert_eq!(table, table_clone);
assert_eq!(table, table_2);
assert_eq!(table_2, table_clone);
}
ManagedNetworkingControlGroup.tsx | import React from 'react';
import { Split, SplitItem } from '@patternfly/react-core';
import { RadioField } from '../../ui';
export interface ManagedNetworkingControlGroupProps {
disabled?: boolean;
}
export const ManagedNetworkingControlGroup = ({
disabled = false,
}: ManagedNetworkingControlGroupProps) => {
const GROUP_NAME = 'managedNetworkingType';
return (
<Split hasGutter>
<SplitItem>
<RadioField
name={GROUP_NAME}
isDisabled={disabled}
value={'clusterManaged'}
label="Cluster-Managed Networking"
/>
</SplitItem>
<SplitItem />
<SplitItem>
<RadioField
name={GROUP_NAME}
isDisabled={disabled}
value={'userManaged'}
label="User-Managed Networking"
/>
</SplitItem>
</Split>
);
};
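// Hypothetical usage (assumes an enclosing Formik form that owns the
// 'managedNetworkingType' field, since RadioField binds to form state):
//   <ManagedNetworkingControlGroup disabled={!canEditNetworking} />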